+
diff --git a/app/assets/javascripts/ml/model_registry/routes/models/index/index.js b/app/assets/javascripts/ml/model_registry/routes/models/index/index.js
new file mode 100644
index 00000000000..d303d9716af
--- /dev/null
+++ b/app/assets/javascripts/ml/model_registry/routes/models/index/index.js
@@ -0,0 +1,3 @@
+import MlModelsIndex from './components/ml_models_index.vue';
+
+export default MlModelsIndex;
diff --git a/app/assets/javascripts/ml/model_registry/routes/models/index/translations.js b/app/assets/javascripts/ml/model_registry/routes/models/index/translations.js
new file mode 100644
index 00000000000..f0f45f9424e
--- /dev/null
+++ b/app/assets/javascripts/ml/model_registry/routes/models/index/translations.js
@@ -0,0 +1,3 @@
+import { s__ } from '~/locale';
+
+export const TITLE_LABEL = s__('MlExperimentTracking|Model registry');
diff --git a/app/assets/javascripts/pages/projects/ml/models/index/index.js b/app/assets/javascripts/pages/projects/ml/models/index/index.js
new file mode 100644
index 00000000000..62d326f43a5
--- /dev/null
+++ b/app/assets/javascripts/pages/projects/ml/models/index/index.js
@@ -0,0 +1,4 @@
+import { initSimpleApp } from '~/helpers/init_simple_app_helper';
+import MlModelsIndex from '~/ml/model_registry/routes/models/index';
+
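+// Mount point #js-index-ml-models is rendered by app/views/projects/ml/models/index.html.haml.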
+initSimpleApp('#js-index-ml-models', MlModelsIndex);
diff --git a/app/assets/javascripts/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue b/app/assets/javascripts/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue
index 57e3a97244e..ab9e6e092d9 100644
--- a/app/assets/javascripts/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue
+++ b/app/assets/javascripts/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue
@@ -116,6 +116,7 @@ export default {
unique: true,
symbol: '@',
token: UserToken,
+ dataType: 'user',
operators: OPERATORS_IS,
fetchPath: this.projectPath,
fetchUsers: Api.projectUsers.bind(Api),
@@ -127,6 +128,7 @@ export default {
unique: true,
symbol: '@',
token: UserToken,
+ dataType: 'user',
operators: OPERATORS_IS,
fetchPath: this.projectPath,
fetchUsers: Api.projectUsers.bind(Api),
diff --git a/app/controllers/projects/jobs_controller.rb b/app/controllers/projects/jobs_controller.rb
index 79ddcbf732d..4e0b304a2ee 100644
--- a/app/controllers/projects/jobs_controller.rb
+++ b/app/controllers/projects/jobs_controller.rb
@@ -21,7 +21,7 @@ class Projects::JobsController < Projects::ApplicationController
before_action :verify_proxy_request!, only: :proxy_websocket_authorize
before_action :push_job_log_jump_to_failures, only: [:show]
before_action :reject_if_build_artifacts_size_refreshing!, only: [:erase]
-
+ before_action :push_ai_build_failure_cause, only: [:show]
layout 'project'
feature_category :continuous_integration
@@ -258,4 +258,8 @@ class Projects::JobsController < Projects::ApplicationController
def push_job_log_jump_to_failures
push_frontend_feature_flag(:job_log_jump_to_failures, @project)
end
+
+ def push_ai_build_failure_cause
+ push_frontend_feature_flag(:ai_build_failure_cause, @project)
+ end
end
diff --git a/app/controllers/projects/ml/models_controller.rb b/app/controllers/projects/ml/models_controller.rb
index e404d9b42c2..77855b73cbd 100644
--- a/app/controllers/projects/ml/models_controller.rb
+++ b/app/controllers/projects/ml/models_controller.rb
@@ -8,9 +8,6 @@ module Projects
def index
@models = ::Projects::Ml::ModelFinder.new(@project).execute
-
- # TODO: Frontend rendering being added with https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124833
- render html: ::Ml::ModelsIndexPresenter.new(@models).present
end
private
diff --git a/app/graphql/resolvers/ci/runner_job_count_resolver.rb b/app/graphql/resolvers/ci/runner_job_count_resolver.rb
new file mode 100644
index 00000000000..a43d3f3a100
--- /dev/null
+++ b/app/graphql/resolvers/ci/runner_job_count_resolver.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Resolvers
+ module Ci
+ class RunnerJobCountResolver < BaseResolver
+ include Gitlab::Graphql::Authorize::AuthorizeResource
+
+ type GraphQL::Types::Int, null: true
+
+ authorize :read_runner
+ authorizes_object!
+
+ argument :statuses, [::Types::Ci::JobStatusEnum],
+ required: false,
+ description: 'Filter jobs by status.',
+ alpha: { milestone: '16.2' }
+
+ alias_method :runner, :object
+
+ def resolve(statuses: nil)
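+        # Batch by the requested statuses so every runner resolved with the same
+        # filter in a single GraphQL request shares one grouped count query.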
+ BatchLoader::GraphQL.for(runner.id).batch(key: [:job_count, statuses]) do |runner_ids, loader, _args|
+ counts_by_runner = calculate_job_count_per_runner(runner_ids, statuses)
+
+ runner_ids.each do |runner_id|
+ loader.call(runner_id, counts_by_runner[runner_id]&.count || 0)
+ end
+ end
+ end
+
+ private
+
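+      # Counts builds per runner using a LATERAL subquery capped at
+      # JOB_COUNT_LIMIT + 1 rows, so a busy runner never requires counting
+      # all of its builds.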
+ def calculate_job_count_per_runner(runner_ids, statuses)
+ # rubocop: disable CodeReuse/ActiveRecord
+ builds_tbl = ::Ci::Build.arel_table
+ runners_tbl = ::Ci::Runner.arel_table
+ lateral_query = ::Ci::Build.select(1).where(builds_tbl['runner_id'].eq(runners_tbl['id']))
+ lateral_query = lateral_query.where(status: statuses) if statuses
+ # We limit to 1 above the JOB_COUNT_LIMIT to indicate that more items exist after JOB_COUNT_LIMIT
+ lateral_query = lateral_query.limit(::Types::Ci::RunnerType::JOB_COUNT_LIMIT + 1)
+ ::Ci::Runner.joins("JOIN LATERAL (#{lateral_query.to_sql}) builds_with_limit ON true")
+ .id_in(runner_ids)
+ .select(:id, Arel.star.count.as('count'))
+ .group(:id)
+ .index_by(&:id)
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+end
diff --git a/app/graphql/types/ci/runner_type.rb b/app/graphql/types/ci/runner_type.rb
index 160ee0234fa..2baf64ca663 100644
--- a/app/graphql/types/ci/runner_type.rb
+++ b/app/graphql/types/ci/runner_type.rb
@@ -59,7 +59,8 @@ module Types
deprecated: { reason: "Use field in `manager` object instead", milestone: '16.2' },
description: 'IP address of the runner.'
field :job_count, GraphQL::Types::Int, null: true,
- description: "Number of jobs processed by the runner (limited to #{JOB_COUNT_LIMIT}, plus one to indicate that more items exist)."
+ description: "Number of jobs processed by the runner (limited to #{JOB_COUNT_LIMIT}, plus one to indicate that more items exist).",
+ resolver: ::Resolvers::Ci::RunnerJobCountResolver
field :job_execution_status,
Types::Ci::RunnerJobExecutionStatusEnum,
null: true,
@@ -126,28 +127,6 @@ module Types
::MarkupHelper.markdown(object.maintenance_note, context.to_h.dup)
end
- def job_count
- BatchLoader::GraphQL.for(runner.id).batch(key: :job_count) do |runner_ids, loader, _args|
- # rubocop: disable CodeReuse/ActiveRecord
- # We limit to 1 above the JOB_COUNT_LIMIT to indicate that more items exist after JOB_COUNT_LIMIT
- builds_tbl = ::Ci::Build.arel_table
- runners_tbl = ::Ci::Runner.arel_table
- lateral_query = ::Ci::Build.select(1)
- .where(builds_tbl['runner_id'].eq(runners_tbl['id']))
- .limit(JOB_COUNT_LIMIT + 1)
- counts = ::Ci::Runner.joins("JOIN LATERAL (#{lateral_query.to_sql}) builds_with_limit ON true")
- .id_in(runner_ids)
- .select(:id, Arel.star.count.as('count'))
- .group(:id)
- .index_by(&:id)
- # rubocop: enable CodeReuse/ActiveRecord
-
- runner_ids.each do |runner_id|
- loader.call(runner_id, counts[runner_id]&.count || 0)
- end
- end
- end
-
def admin_url
Gitlab::Routing.url_helpers.admin_runner_url(runner) if can_admin_runners?
end
diff --git a/app/helpers/ci/jobs_helper.rb b/app/helpers/ci/jobs_helper.rb
index a7e1de173bd..991b1f4d74e 100644
--- a/app/helpers/ci/jobs_helper.rb
+++ b/app/helpers/ci/jobs_helper.rb
@@ -2,16 +2,16 @@
module Ci
module JobsHelper
- def jobs_data
+ def jobs_data(project, build)
{
- "endpoint" => project_job_path(@project, @build, format: :json),
- "project_path" => @project.full_path,
+ "endpoint" => project_job_path(project, build, format: :json),
+ "project_path" => project.full_path,
"artifact_help_url" => help_page_path('user/gitlab_com/index.md', anchor: 'gitlab-cicd'),
"deployment_help_url" => help_page_path('user/project/clusters/deploy_to_cluster.md', anchor: 'troubleshooting'),
- "runner_settings_url" => project_runners_path(@build.project, anchor: 'js-runners-settings'),
- "page_path" => project_job_path(@project, @build),
- "build_status" => @build.status,
- "build_stage" => @build.stage_name,
+ "runner_settings_url" => project_runners_path(build.project, anchor: 'js-runners-settings'),
+ "page_path" => project_job_path(project, build),
+ "build_status" => build.status,
+ "build_stage" => build.stage_name,
"log_state" => '',
"build_options" => javascript_build_options,
"retry_outdated_job_docs_url" => help_page_path('ci/pipelines/settings', anchor: 'retry-outdated-jobs')
diff --git a/app/models/ci/catalog/resource.rb b/app/models/ci/catalog/resource.rb
index 77cfe91ddd6..38603ddfe59 100644
--- a/app/models/ci/catalog/resource.rb
+++ b/app/models/ci/catalog/resource.rb
@@ -19,6 +19,8 @@ module Ci
delegate :avatar_path, :description, :name, :star_count, :forks_count, to: :project
+ enum state: { draft: 0, published: 1 }
+
def versions
project.releases.order_released_desc
end
diff --git a/app/models/concerns/commit_signature.rb b/app/models/concerns/commit_signature.rb
index 5dac3c7833a..5bdf6bb31bf 100644
--- a/app/models/concerns/commit_signature.rb
+++ b/app/models/concerns/commit_signature.rb
@@ -16,7 +16,8 @@ module CommitSignature
unverified_key: 4,
unknown_key: 5,
multiple_signatures: 6,
- revoked_key: 7
+ revoked_key: 7,
+ verified_system: 8
}
belongs_to :project, class_name: 'Project', foreign_key: 'project_id', optional: false
diff --git a/app/models/concerns/has_user_type.rb b/app/models/concerns/has_user_type.rb
index 9d4b8328e8d..2d0ff82e624 100644
--- a/app/models/concerns/has_user_type.rb
+++ b/app/models/concerns/has_user_type.rb
@@ -14,7 +14,7 @@ module HasUserType
migration_bot: 7,
security_bot: 8,
automation_bot: 9,
- security_policy_bot: 10, # Currently not in use. See https://gitlab.com/gitlab-org/gitlab/-/issues/384174
+ security_policy_bot: 10,
admin_bot: 11,
suggested_reviewers_bot: 12,
service_account: 13,
diff --git a/app/models/concerns/vulnerability_finding_helpers.rb b/app/models/concerns/vulnerability_finding_helpers.rb
index a5b69997900..e8a50497b20 100644
--- a/app/models/concerns/vulnerability_finding_helpers.rb
+++ b/app/models/concerns/vulnerability_finding_helpers.rb
@@ -59,6 +59,7 @@ module VulnerabilityFindingHelpers
evidence = Vulnerabilities::Finding::Evidence.new(data: report_finding.evidence.data) if report_finding.evidence
Vulnerabilities::Finding.new(finding_data).tap do |finding|
+ finding.uuid = security_finding.uuid
finding.location_fingerprint = report_finding.location.fingerprint
finding.vulnerability = vulnerability_for(security_finding.uuid)
finding.project = project
diff --git a/app/views/projects/commit/_verified_system_signature_badge.html.haml b/app/views/projects/commit/_verified_system_signature_badge.html.haml
new file mode 100644
index 00000000000..96ff26ecbd7
--- /dev/null
+++ b/app/views/projects/commit/_verified_system_signature_badge.html.haml
@@ -0,0 +1,5 @@
+- title = _('Verified commit')
+- description = _('This commit was created in the GitLab UI, and signed with a GitLab-verified signature.')
+- locals = { signature: signature, title: title, description: description, label: _('Verified'), variant: 'success' }
+
+= render partial: 'projects/commit/signature_badge', locals: locals
diff --git a/app/views/projects/jobs/show.html.haml b/app/views/projects/jobs/show.html.haml
index 5f249f693ff..b151c355b3e 100644
--- a/app/views/projects/jobs/show.html.haml
+++ b/app/views/projects/jobs/show.html.haml
@@ -7,4 +7,4 @@
= render_if_exists "shared/shared_runners_minutes_limit_flash_message"
-#js-job-page{ data: jobs_data }
+#js-job-page{ data: jobs_data(@project, @build) }
diff --git a/app/views/projects/ml/models/index.html.haml b/app/views/projects/ml/models/index.html.haml
new file mode 100644
index 00000000000..2caba2ae9be
--- /dev/null
+++ b/app/views/projects/ml/models/index.html.haml
@@ -0,0 +1,5 @@
+- breadcrumb_title s_('ModelRegistry|Model registry')
+- page_title s_('ModelRegistry|Model registry')
+- presenter = ::Ml::ModelsIndexPresenter.new(@models)
+
+#js-index-ml-models{ data: { view_model: presenter.present } }
diff --git a/config/application.rb b/config/application.rb
index 91a5f29de79..05f3a726e27 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -30,7 +30,6 @@ module Gitlab
# Rails 7.0
config.action_controller.raise_on_open_redirects = false
- config.action_controller.wrap_parameters_by_default = false
config.action_dispatch.default_headers = { "X-Frame-Options" => "SAMEORIGIN",
"X-XSS-Protection" => "1; mode=block",
"X-Content-Type-Options" => "nosniff",
diff --git a/config/feature_flags/development/group_analytics_dashboards.yml b/config/feature_flags/development/group_analytics_dashboards.yml
new file mode 100644
index 00000000000..55001b99452
--- /dev/null
+++ b/config/feature_flags/development/group_analytics_dashboards.yml
@@ -0,0 +1,8 @@
+---
+name: group_analytics_dashboards
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/125337
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/416970
+milestone: '16.2'
+type: development
+group: group::optimize
+default_enabled: false
diff --git a/config/initializers/wrap_parameters.rb b/config/initializers/wrap_parameters.rb
deleted file mode 100644
index 85b2d840618..00000000000
--- a/config/initializers/wrap_parameters.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-# Be sure to restart your server when you modify this file.
-#
-# This file contains settings for ActionController::ParamsWrapper which
-# is enabled by default.
-
-# Enable parameter wrapping for JSON. You can disable this by setting :format to an empty array.
-ActiveSupport.on_load(:action_controller) do
- wrap_parameters format: [:json]
-end
-
-# Disable root element in JSON by default.
-ActiveSupport.on_load(:active_record) do
- self.include_root_in_json = false
-end
diff --git a/db/migrate/20230630170515_add_state_to_catalog_resources.rb b/db/migrate/20230630170515_add_state_to_catalog_resources.rb
new file mode 100644
index 00000000000..ff0a5fda6c9
--- /dev/null
+++ b/db/migrate/20230630170515_add_state_to_catalog_resources.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddStateToCatalogResources < Gitlab::Database::Migration[2.1]
+ DRAFT = 0
+
+ def change
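+    # New catalog resources default to the draft state; the value mirrors the
+    # `draft` entry of the state enum on Ci::Catalog::Resource.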
+ add_column :catalog_resources, :state, :smallint, null: false, limit: 1, default: DRAFT
+ end
+end
diff --git a/db/post_migrate/20230703122803_create_vulnerability_uuid_type_migration_index.rb b/db/post_migrate/20230703122803_create_vulnerability_uuid_type_migration_index.rb
index 97bca0399c8..f5ac2c2d0b9 100644
--- a/db/post_migrate/20230703122803_create_vulnerability_uuid_type_migration_index.rb
+++ b/db/post_migrate/20230703122803_create_vulnerability_uuid_type_migration_index.rb
@@ -1,24 +1,11 @@
# frozen_string_literal: true
class CreateVulnerabilityUuidTypeMigrationIndex < Gitlab::Database::Migration[2.1]
- disable_ddl_transaction!
-
- INDEX_NAME = 'tmp_idx_vulns_on_converted_uuid'
- WHERE_CLAUSE = "uuid_convert_string_to_uuid = '00000000-0000-0000-0000-000000000000'::uuid"
-
def up
- add_concurrent_index(
- :vulnerability_occurrences,
- %i[id uuid],
- name: INDEX_NAME,
- where: WHERE_CLAUSE
- )
+ # no-op due to https://gitlab.com/gitlab-com/gl-infra/production/-/issues/15983
end
def down
- remove_concurrent_index_by_name(
- :vulnerability_occurrences,
- INDEX_NAME
- )
+ # no-op due to https://gitlab.com/gitlab-com/gl-infra/production/-/issues/15983
end
end
diff --git a/db/post_migrate/20230705115215_remove_user_details_provisioned_by_group_at_column.rb b/db/post_migrate/20230705115215_remove_user_details_provisioned_by_group_at_column.rb
new file mode 100644
index 00000000000..33844f6714e
--- /dev/null
+++ b/db/post_migrate/20230705115215_remove_user_details_provisioned_by_group_at_column.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class RemoveUserDetailsProvisionedByGroupAtColumn < Gitlab::Database::Migration[2.1]
+ enable_lock_retries!
+
+ def change
+ remove_column :user_details, :provisioned_by_group_at, :datetime_with_timezone
+ end
+end
diff --git a/db/schema_migrations/20230630170515 b/db/schema_migrations/20230630170515
new file mode 100644
index 00000000000..7c0f42235af
--- /dev/null
+++ b/db/schema_migrations/20230630170515
@@ -0,0 +1 @@
+838865785ae99586e6c4017b5c51dee845208501a66fd4a3890641be0c076dfc
\ No newline at end of file
diff --git a/db/schema_migrations/20230705115215 b/db/schema_migrations/20230705115215
new file mode 100644
index 00000000000..ec12c32117d
--- /dev/null
+++ b/db/schema_migrations/20230705115215
@@ -0,0 +1 @@
+3035da4873093dd7c8cee94398c3b4a70a696e1e765d102cfe657b53e3b65be9
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index 82c69de554c..402ad83514a 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -12931,7 +12931,8 @@ ALTER SEQUENCE bulk_imports_id_seq OWNED BY bulk_imports.id;
CREATE TABLE catalog_resources (
id bigint NOT NULL,
project_id bigint NOT NULL,
- created_at timestamp with time zone NOT NULL
+ created_at timestamp with time zone NOT NULL,
+ state smallint DEFAULT 0 NOT NULL
);
CREATE SEQUENCE catalog_resources_id_seq
@@ -23632,7 +23633,6 @@ CREATE TABLE user_details (
password_last_changed_at timestamp with time zone DEFAULT now() NOT NULL,
onboarding_step_url text,
discord text DEFAULT ''::text NOT NULL,
- provisioned_by_group_at timestamp with time zone,
enterprise_group_id bigint,
enterprise_group_associated_at timestamp with time zone,
CONSTRAINT check_245664af82 CHECK ((char_length(webauthn_xid) <= 100)),
@@ -33658,8 +33658,6 @@ CREATE INDEX tmp_idx_vuln_reads_where_dismissal_reason_null ON vulnerability_rea
CREATE INDEX tmp_idx_vulnerability_occurrences_on_id_where_report_type_7_99 ON vulnerability_occurrences USING btree (id) WHERE (report_type = ANY (ARRAY[7, 99]));
-CREATE INDEX tmp_idx_vulns_on_converted_uuid ON vulnerability_occurrences USING btree (id, uuid) WHERE (uuid_convert_string_to_uuid = '00000000-0000-0000-0000-000000000000'::uuid);
-
CREATE INDEX tmp_index_ci_job_artifacts_on_expire_at_where_locked_unknown ON ci_job_artifacts USING btree (expire_at, job_id) WHERE ((locked = 2) AND (expire_at IS NOT NULL));
CREATE INDEX tmp_index_cis_vulnerability_reads_on_id ON vulnerability_reads USING btree (id) WHERE (report_type = 7);
diff --git a/doc/administration/geo/replication/troubleshooting.md b/doc/administration/geo/replication/troubleshooting.md
index eb8fb1ee26c..5f88ca8aec8 100644
--- a/doc/administration/geo/replication/troubleshooting.md
+++ b/doc/administration/geo/replication/troubleshooting.md
@@ -1679,7 +1679,7 @@ Repository check failures on a Geo secondary site do not necessarily imply a rep
1. Find affected repositories as mentioned below, as well as their [logged errors](../../repository_checks.md#what-to-do-if-a-check-failed).
1. Try to diagnose specific `git fsck` errors. The range of possible errors is wide, try putting them into search engines.
-1. Test normal functions of the affected repositories. Pull from the secondary, view the files.
+1. Test typical functions of the affected repositories. Pull from the secondary, view the files.
1. Check if the primary site's copy of the repository has an identical `git fsck` error. If you are planning a failover, then consider prioritizing that the secondary site has the same information that the primary site has. Ensure you have a backup of the primary, and follow [planned failover guidelines](../disaster_recovery/planned_failover.md).
1. Push to the primary and check if the change gets replicated to the secondary site.
1. If replication is not automatically working, try to manually sync the repository.
diff --git a/doc/administration/reference_architectures/10k_users.md b/doc/administration/reference_architectures/10k_users.md
index c3cf7c599a3..fdb6302aa5b 100644
--- a/doc/administration/reference_architectures/10k_users.md
+++ b/doc/administration/reference_architectures/10k_users.md
@@ -2265,7 +2265,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
### Cluster topology
The following tables and diagram detail the hybrid environment using the same formats
-as the normal environment above.
+as the typical environment above.
First are the components that run in Kubernetes. These run across several node groups, although you can change
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
diff --git a/doc/administration/reference_architectures/25k_users.md b/doc/administration/reference_architectures/25k_users.md
index 37571ed5771..4fd32061f73 100644
--- a/doc/administration/reference_architectures/25k_users.md
+++ b/doc/administration/reference_architectures/25k_users.md
@@ -2283,7 +2283,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
### Cluster topology
The following tables and diagram detail the hybrid environment using the same formats
-as the normal environment above.
+as the typical environment above.
First are the components that run in Kubernetes. These run across several node groups, although you can change
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
diff --git a/doc/administration/reference_architectures/2k_users.md b/doc/administration/reference_architectures/2k_users.md
index 455b0fbafd1..fd45524c505 100644
--- a/doc/administration/reference_architectures/2k_users.md
+++ b/doc/administration/reference_architectures/2k_users.md
@@ -963,7 +963,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
### Cluster topology
The following tables and diagram detail the hybrid environment using the same formats
-as the normal environment above.
+as the typical environment above.
First are the components that run in Kubernetes. These run across several node groups, although you can change
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
diff --git a/doc/administration/reference_architectures/3k_users.md b/doc/administration/reference_architectures/3k_users.md
index 6a7d9864376..c90bea97e62 100644
--- a/doc/administration/reference_architectures/3k_users.md
+++ b/doc/administration/reference_architectures/3k_users.md
@@ -2272,7 +2272,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
### Cluster topology
The following tables and diagram detail the hybrid environment using the same formats
-as the normal environment above.
+as the typical environment above.
First are the components that run in Kubernetes. These run across several node groups, although you can change
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
diff --git a/doc/administration/reference_architectures/50k_users.md b/doc/administration/reference_architectures/50k_users.md
index 9a2c354f27c..1982ec64572 100644
--- a/doc/administration/reference_architectures/50k_users.md
+++ b/doc/administration/reference_architectures/50k_users.md
@@ -2282,7 +2282,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
### Cluster topology
The following tables and diagram detail the hybrid environment using the same formats
-as the normal environment above.
+as the typical environment above.
First are the components that run in Kubernetes. These run across several node groups, although you can change
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
diff --git a/doc/administration/reference_architectures/5k_users.md b/doc/administration/reference_architectures/5k_users.md
index b0bc70aaf00..c3a19c31ca3 100644
--- a/doc/administration/reference_architectures/5k_users.md
+++ b/doc/administration/reference_architectures/5k_users.md
@@ -2240,7 +2240,7 @@ Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more
### Cluster topology
The following tables and diagram detail the hybrid environment using the same formats
-as the normal environment above.
+as the typical environment above.
First are the components that run in Kubernetes. These run across several node groups, although you can change
the overall makeup as desired as long as the minimum CPU and Memory requirements are observed.
diff --git a/doc/api/database_migrations.md b/doc/api/database_migrations.md
new file mode 100644
index 00000000000..d7aea7ad57e
--- /dev/null
+++ b/doc/api/database_migrations.md
@@ -0,0 +1,33 @@
+---
+stage: Data Stores
+group: Database
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
+---
+
+# Database migrations API **(FREE SELF)**
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123408) in GitLab 16.2.
+
+This API is for managing database migrations used in the development of GitLab.
+
+All methods require administrator authorization.
+
+## Mark a migration as successful
+
+Mark pending migrations as successfully executed to prevent them from being
+executed by the `db:migrate` tasks. Use this API to skip failing
+migrations after they are determined to be safe to skip.
+
+```plaintext
+POST /api/v4/admin/migrations/:version/mark
+```
+
+| Attribute | Type | Required | Description |
+|-----------------|----------------|----------|----------------------------------------------------------------------------------|
+| `version` | integer | yes | Version timestamp of the migration to be skipped |
+| `database` | string | no | The database name for which the migration is skipped. Defaults to `main`. |
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
+     --url "https://gitlab.example.com/api/v4/admin/migrations/:version/mark"
+```
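+
+For example, to mark version `20230705115215` as successful on the `ci` database
+(substitute your own pending migration version):
+
+```shell
+curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
+     --data "database=ci" \
+     --url "https://gitlab.example.com/api/v4/admin/migrations/20230705115215/mark"
+```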
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index a299927c711..d4ada9791da 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -13160,7 +13160,6 @@ CI/CD variables for a project.
| `groups` | [`GroupConnection`](#groupconnection) | Groups the runner is associated with. For group runners only. (see [Connections](#connections)) |
| `id` | [`CiRunnerID!`](#cirunnerid) | ID of the runner. |
| `ipAddress` **{warning-solid}** | [`String`](#string) | **Deprecated** in 16.2. Use field in `manager` object instead. |
-| `jobCount` | [`Int`](#int) | Number of jobs processed by the runner (limited to 1000, plus one to indicate that more items exist). |
| `jobExecutionStatus` **{warning-solid}** | [`CiRunnerJobExecutionStatus`](#cirunnerjobexecutionstatus) | **Introduced** in 15.7. This feature is an Experiment. It can be changed or removed at any time. Job execution status of the runner. |
| `locked` | [`Boolean`](#boolean) | Indicates the runner is locked. |
| `maintenanceNote` | [`String`](#string) | Runner's maintenance notes. |
@@ -13186,6 +13185,18 @@ CI/CD variables for a project.
#### Fields with arguments
+##### `CiRunner.jobCount`
+
+Number of jobs processed by the runner (limited to 1000, plus one to indicate that more items exist).
+
+Returns [`Int`](#int).
+
+###### Arguments
+
+| Name | Type | Description |
+| ---- | ---- | ----------- |
+| `statuses` **{warning-solid}** | [`[CiJobStatus!]`](#cijobstatus) | **Introduced** in 16.2. This feature is an Experiment. It can be changed or removed at any time. Filter jobs by status. |
+
##### `CiRunner.jobs`
Jobs assigned to the runner. This field can only be resolved for one runner in any single request.
@@ -26727,6 +26738,7 @@ Verification status of a GPG or X.509 signature for a commit.
| `UNVERIFIED` | unverified verification status. |
| `UNVERIFIED_KEY` | unverified_key verification status. |
| `VERIFIED` | verified verification status. |
+| `VERIFIED_SYSTEM` | verified_system verification status. |
### `VisibilityLevelsEnum`
diff --git a/doc/api/projects.md b/doc/api/projects.md
index f633c1e2e2d..e4507499634 100644
--- a/doc/api/projects.md
+++ b/doc/api/projects.md
@@ -304,6 +304,10 @@ Keyset pagination supports only `order_by=id`. Other sorting options aren't avai
Get a list of visible projects owned by the given user. When accessed without
authentication, only public projects are returned.
+Prerequisite:
+
+- To view [certain attributes](https://gitlab.com/gitlab-org/gitlab/-/blob/520776fa8e5a11b8275b7c597d75246fcfc74c89/lib/api/entities/project.rb#L109-130), you must be an administrator or have the Owner role for the project.
+
NOTE:
Only the projects in the user's (specified in `user_id`) namespace are returned. Projects owned by the user in any group or subgroups are not returned. An empty list is returned if a profile is set to private.
diff --git a/doc/api/templates/gitlab_ci_ymls.md b/doc/api/templates/gitlab_ci_ymls.md
index 21e3400645e..69346f8ab3d 100644
--- a/doc/api/templates/gitlab_ci_ymls.md
+++ b/doc/api/templates/gitlab_ci_ymls.md
@@ -135,7 +135,7 @@ Example response:
```json
{
"name": "Ruby",
- "content": "# This file is a template, and might need editing before it works on your project.\n# To contribute improvements to CI/CD templates, please follow the Development guide at:\n# https://docs.gitlab.com/ee/development/cicd/templates.html\n# This specific template is located at:\n# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Ruby.gitlab-ci.yml\n\n# Official language image. Look for the different tagged releases at:\n# https://hub.docker.com/r/library/ruby/tags/\nimage: ruby:latest\n\n# Pick zero or more services to be used on all builds.\n# Only needed when using a docker container to run your tests in.\n# Check out: https://docs.gitlab.com/ee/ci/services/index.html\nservices:\n - mysql:latest\n - redis:latest\n - postgres:latest\n\nvariables:\n POSTGRES_DB: database_name\n\n# Cache gems in between builds\ncache:\n paths:\n - vendor/ruby\n\n# This is a basic example for a gem or script which doesn't use\n# services such as redis or postgres\nbefore_script:\n - ruby -v # Print out ruby version for debugging\n # Uncomment next line if your rails app needs a JS runtime:\n # - apt-get update -q \u0026\u0026 apt-get install nodejs -yqq\n - bundle config set --local deployment true # Install dependencies into ./vendor/ruby\n - bundle install -j $(nproc)\n\n# Optional - Delete if not using `rubocop`\nrubocop:\n script:\n - rubocop\n\nrspec:\n script:\n - rspec spec\n\nrails:\n variables:\n DATABASE_URL: \"postgresql://postgres:postgres@postgres:5432/$POSTGRES_DB\"\n script:\n - rails db:migrate\n - rails db:seed\n - rails test\n\n# This deploy job uses a simple deploy flow to Heroku, other providers, e.g. AWS Elastic Beanstalk\n# are supported too: https://github.com/travis-ci/dpl\ndeploy:\n stage: deploy\n environment: production\n script:\n - gem install dpl\n - dpl --provider=heroku --app=$HEROKU_APP_NAME --api-key=$HEROKU_PRODUCTION_KEY\n"
+ "content": "# This file is a template, and might need editing before it works on your project.\n# To contribute improvements to CI/CD templates, please follow the Development guide at:\n# https://docs.gitlab.com/ee/development/cicd/templates.html\n# This specific template is located at:\n# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Ruby.gitlab-ci.yml\n\n# Official language image. Look for the different tagged releases at:\n# https://hub.docker.com/r/library/ruby/tags/\nimage: ruby:latest\n\n# Pick zero or more services to be used on all builds.\n# Only needed when using a docker container to run your tests in.\n# Check out: https://docs.gitlab.com/ee/ci/services/index.html\nservices:\n - mysql:latest\n - redis:latest\n - postgres:latest\n\nvariables:\n POSTGRES_DB: database_name\n\n# Cache gems in between builds\ncache:\n paths:\n - vendor/ruby\n\n# This is a basic example for a gem or script which doesn't use\n# services such as redis or postgres\nbefore_script:\n - ruby -v # Print out ruby version for debugging\n # Uncomment next line if your rails app needs a JS runtime:\n # - apt-get update -q \u0026\u0026 apt-get install nodejs -yqq\n - bundle config set --local deployment true # Install dependencies into ./vendor/ruby\n - bundle install -j $(nproc)\n\n# Optional - Delete if not using `rubocop`\nrubocop:\n script:\n - rubocop\n\nrspec:\n script:\n - rspec spec\n\nrails:\n variables:\n DATABASE_URL: \"postgresql://postgres:postgres@postgres:5432/$POSTGRES_DB\"\n script:\n - rails db:migrate\n - rails db:seed\n - rails test\n\n# This deploy job uses a simple deploy flow to Heroku, other providers, for example, AWS Elastic Beanstalk\n# are supported too: https://github.com/travis-ci/dpl\ndeploy:\n stage: deploy\n environment: production\n script:\n - gem install dpl\n - dpl --provider=heroku --app=$HEROKU_APP_NAME --api-key=$HEROKU_PRODUCTION_KEY\n"
}
```
diff --git a/doc/architecture/blueprints/cells/cells-feature-backups.md b/doc/architecture/blueprints/cells/cells-feature-backups.md
index 23750750ca4..b5d5d7afdcf 100644
--- a/doc/architecture/blueprints/cells/cells-feature-backups.md
+++ b/doc/architecture/blueprints/cells/cells-feature-backups.md
@@ -25,9 +25,9 @@ and also Git repository data.
## 2. Data flow
-Each cell has a number of application databases to back up (e.g. `main`, and `ci`).
+Each cell has a number of application databases to back up (for example, `main`, and `ci`).
-Additionally, there may be cluster-wide metadata tables (e.g. `users` table)
+Additionally, there may be cluster-wide metadata tables (for example, `users` table)
which is directly accessible via PostgreSQL.
## 3. Proposal
diff --git a/doc/architecture/blueprints/cells/cells-feature-secrets.md b/doc/architecture/blueprints/cells/cells-feature-secrets.md
index 20260c89ccd..50ccf926b4d 100644
--- a/doc/architecture/blueprints/cells/cells-feature-secrets.md
+++ b/doc/architecture/blueprints/cells/cells-feature-secrets.md
@@ -25,10 +25,10 @@ GitLab has a lot of
[secrets](https://docs.gitlab.com/charts/installation/secrets.html) that needs
to be configured.
-Some secrets are for inter-component communication, e.g. `GitLab Shell secret`,
+Some secrets are for inter-component communication, for example, `GitLab Shell secret`,
and used only within a cell.
-Some secrets are used for features, e.g. `ci_jwt_signing_key`.
+Some secrets are used for features, for example, `ci_jwt_signing_key`.
## 2. Data flow
diff --git a/doc/architecture/blueprints/cells/index.md b/doc/architecture/blueprints/cells/index.md
index 6da99e0aa6a..067ddfa879e 100644
--- a/doc/architecture/blueprints/cells/index.md
+++ b/doc/architecture/blueprints/cells/index.md
@@ -190,7 +190,7 @@ information. For example:
by one of the Cells, and the results of that can be cached. We also need to implement
a mechanism for negative cache and cache eviction.
-1. **GraphQL and other ambigious endpoints.**
+1. **GraphQL and other ambiguous endpoints.**
Most endpoints have a unique sharding key: the organization, which directly
or indirectly (via a group or project) can be used to classify endpoints.
diff --git a/doc/architecture/blueprints/cells/proposal-stateless-router-with-buffering-requests.md b/doc/architecture/blueprints/cells/proposal-stateless-router-with-buffering-requests.md
index f352fea84b1..c1ca0c60dcd 100644
--- a/doc/architecture/blueprints/cells/proposal-stateless-router-with-buffering-requests.md
+++ b/doc/architecture/blueprints/cells/proposal-stateless-router-with-buffering-requests.md
@@ -429,7 +429,7 @@ sequenceDiagram
```
In this case the user is not on their "default organization" so their TODO
-counter will not include their normal todos. We may choose to highlight this in
+counter will not include their typical todos. We may choose to highlight this in
the UI somewhere. A future iteration may be able to fetch that for them from
their default organization.
diff --git a/doc/architecture/blueprints/cells/proposal-stateless-router-with-routes-learning.md b/doc/architecture/blueprints/cells/proposal-stateless-router-with-routes-learning.md
index aadc08016e3..3b3d481914f 100644
--- a/doc/architecture/blueprints/cells/proposal-stateless-router-with-routes-learning.md
+++ b/doc/architecture/blueprints/cells/proposal-stateless-router-with-routes-learning.md
@@ -452,7 +452,7 @@ sequenceDiagram
```
In this case the user is not on their "default organization" so their TODO
-counter will not include their normal todos. We may choose to highlight this in
+counter will not include their typical todos. We may choose to highlight this in
the UI somewhere. A future iteration may be able to fetch that for them from
their default organization.
diff --git a/doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md b/doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md
index 5bcf0ceeda4..66089085d0d 100644
--- a/doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md
+++ b/doc/architecture/blueprints/clickhouse_ingestion_pipeline/index.md
@@ -208,7 +208,7 @@ Gitlab::Database::Writer.config do |config|
# then backend-specific configurations hereafter
#
config.url = 'tcp://user:pwd@localhost:9000/database'
- # e.g. a serializer helps define how data travels over the wire
+ # for example, a serializer helps define how data travels over the wire
config.json_serializer = ClickHouse::Serializer::JsonSerializer
# ...
end
diff --git a/doc/architecture/blueprints/clickhouse_usage/index.md b/doc/architecture/blueprints/clickhouse_usage/index.md
index 0629756c5d9..3febb09f0bf 100644
--- a/doc/architecture/blueprints/clickhouse_usage/index.md
+++ b/doc/architecture/blueprints/clickhouse_usage/index.md
@@ -35,6 +35,10 @@ As ClickHouse has already been selected for use at GitLab, our main goal now is
1. Launch: Support ClickHouse-backed features for SaaS and self-managed.
1. Improve: Successfully scale our usage of ClickHouse.
+### Non-goals
+
+ClickHouse will not be packaged by default with self-managed GitLab, due to uncertain need, complexity, and lack of operational experience. We will still work to find the best possible way to enable users to use ClickHouse themselves if they desire, but it will not be on by default. [ClickHouse maintenance and cost](self_managed_costs_and_requirements/index.md) investigations revealed an uncertain cost impact to smaller instances and, at this time, unknown nuances in managing ClickHouse. This means features that depend only on ClickHouse will not be available out of the box for self-managed users (as of end of 2022, the majority of revenue comes from self-managed), so new features researching the use of ClickHouse should be aware of the potential impacts on user adoption in the near term, until a solution is viable.
+
## Proposals
The following are links to proposals in the form of blueprints that address technical challenges to using ClickHouse across a wide variety of features.
@@ -44,6 +48,20 @@ The following are links to proposals in the form of blueprints that address tech
1. [Abstraction layer](../clickhouse_read_abstraction_layer/index.md) for features to leverage both ClickHouse and PostgreSQL.
- What are the benefits and tradeoffs? For example, how would this impact our automated migration and query testing?
+### Product roadmap
+
+#### Near-term
+
+In the next 3 months (FY24 Q2), ClickHouse will be implemented by default only for SaaS on GitLab.com, with manual enablement for self-managed instances. This is due to the uncertain costs and management requirements for self-managed instances. This near-term implementation will be used to develop best practices and strategy to direct self-managed users. This will also continually shape our recommendations for self-managed instances that want to onboard ClickHouse early.
+
+#### Mid-term
+
+After we have formulated best practices of managing ClickHouse ourselves for GitLab.com, the plan for 3-9 months (FY24 2H) will be to offer supported recommendations for self-managed instances that want to run ClickHouse themselves or potentially to a ClickHouse cluster/VM we would manage for users. One proposal for self-managed users is to [create a proxy or abstraction layer](https://gitlab.com/groups/gitlab-org/-/epics/308) that would allow users to connect their self-managed instance to SaaS without additional effort. Another option would be to allow users to "Bring your own ClickHouse" similar to our [approach for Elasticsearch](../../../integration/advanced_search/elasticsearch.md#install-elasticsearch). For the features that require ClickHouse for optimal usage (Value Streams Dashboard, [Product Analytics](https://gitlab.com/groups/gitlab-org/-/epics/8921) and Observability), this will be the initial go-to-market action.
+
+#### Long-term
+
+We will work towards a packaged reference version of ClickHouse capable of being easily managed with minimal cost increases for self-managed users. We should be able to reliably instruct users on the management of ClickHouse and provide accurate costs for usage. This will mean any feature could depend on ClickHouse without decreasing end-user exposure.
+
## Best Practices
Best practices and guidelines for developing performant, secure, and scalable features using ClickHouse are located in the [ClickHouse developer documentation](../../../development/database/clickhouse/index.md).
diff --git a/doc/architecture/blueprints/code_search_with_zoekt/index.md b/doc/architecture/blueprints/code_search_with_zoekt/index.md
index 681782609ba..273d8da482c 100644
--- a/doc/architecture/blueprints/code_search_with_zoekt/index.md
+++ b/doc/architecture/blueprints/code_search_with_zoekt/index.md
@@ -33,7 +33,7 @@ GitLab code search functionality today is backed by Elasticsearch.
Elasticsearch has proven useful for other types of search (issues, merge
requests, comments and so-on) but is by design not a good choice for code
search where users expect matches to be precise (ie. no false positives) and
-flexible (e.g. support
+flexible (for example, support
[substring matching](https://gitlab.com/gitlab-org/gitlab/-/issues/325234)
and
[regexes](https://gitlab.com/gitlab-org/gitlab/-/issues/4175)). We have
diff --git a/doc/architecture/blueprints/container_registry_metadata_database/index.md b/doc/architecture/blueprints/container_registry_metadata_database/index.md
index b77aaf598e6..a538910f553 100644
--- a/doc/architecture/blueprints/container_registry_metadata_database/index.md
+++ b/doc/architecture/blueprints/container_registry_metadata_database/index.md
@@ -266,7 +266,7 @@ The expected registry behavior will be covered with integration tests by manipul
##### Latency
-Excessive latency on established connections is hard to detect and debug, as these might not raise an application error or network timeout in normal circumstances but usually precede them.
+Excessive latency on established connections is hard to detect and debug, as these might not raise an application error or network timeout in typical circumstances but usually precede them.
For this reason, the duration of database queries used to serve HTTP API requests should be instrumented using metrics, allowing the detection of unusual variations and trigger alarms accordingly before an excessive latency becomes a timeout or service unavailability.
diff --git a/doc/architecture/blueprints/container_registry_metadata_database_self_managed_rollout/index.md b/doc/architecture/blueprints/container_registry_metadata_database_self_managed_rollout/index.md
index 148478e171f..a73f6335218 100644
--- a/doc/architecture/blueprints/container_registry_metadata_database_self_managed_rollout/index.md
+++ b/doc/architecture/blueprints/container_registry_metadata_database_self_managed_rollout/index.md
@@ -180,14 +180,14 @@ is complete and the registry is ready to make full use of the metadata database.
For users with large registries and who are interested in the minimum possible
downtime, each step can be ran independently when the tool is passed the appropriate
flag. The user will first run the pre-import step while the registry is
-performing its normal workload. Once that has completed, and the user is ready
+performing its usual workload. Once that has completed, and the user is ready
to stop writes to the registry, the tag import step can be ran. As with the GitLab.com
migration, importing tags requires that the registry be offline or in
read-only mode. This step does the minimum possible work to achieve fast and
efficient tag imports and will always be the fastest of the three steps, reducing
the downtime component to a fraction of the total import time. The user can then
bring up the registry configured to use the metadata database. After that, the
-user is free to run the third step during normal registry operations. This step
+user is free to run the third step during standard registry operations. This step
makes any dangling blobs in common storage visible to the database and therefore
the online garbage collection process.
diff --git a/doc/architecture/blueprints/gitlab_agent_deployments/index.md b/doc/architecture/blueprints/gitlab_agent_deployments/index.md
index 00ab39a5cd4..798c8a3045d 100644
--- a/doc/architecture/blueprints/gitlab_agent_deployments/index.md
+++ b/doc/architecture/blueprints/gitlab_agent_deployments/index.md
@@ -301,8 +301,8 @@ If no resources are found, this is likely that the users have not embedded these
### Dependency graph
- GitLab frontend uses [Owner References](https://kubernetes.io/docs/concepts/overview/working-with-objects/owners-dependents/) to idenfity the dependencies between resources. These are embedded in resources as `metadata.ownerReferences` field.
-- For the resoruces that don't have owner references, we can use [Well-Known Labels, Annotations and Taints](https://kubernetes.io/docs/reference/labels-annotations-taints/) as complement. e.g. `EndpointSlice` doesn't have `metadata.ownerReferences`, but has `kubernetes.io/service-name` as a reference to the parent `Service` resource.
+- For the resources that don't have owner references, we can use [Well-Known Labels, Annotations and Taints](https://kubernetes.io/docs/reference/labels-annotations-taints/) as complement. For example, `EndpointSlice` doesn't have `metadata.ownerReferences`, but has `kubernetes.io/service-name` as a reference to the parent `Service` resource.
### Health status of resources
-- GitLab frontend computes the status summary from the fetched resources. Something similar to ArgoCD's [Resource Health](https://argo-cd.readthedocs.io/en/stable/operator-manual/health/) e.g. `Healthy`, `Progressing`, `Degraded` and `Suspended`. The formula is TBD.
+- GitLab frontend computes the status summary from the fetched resources. Something similar to ArgoCD's [Resource Health](https://argo-cd.readthedocs.io/en/stable/operator-manual/health/), for example, `Healthy`, `Progressing`, `Degraded` and `Suspended`. The formula is TBD.
diff --git a/doc/architecture/blueprints/gitlab_observability_backend/metrics/index.md b/doc/architecture/blueprints/gitlab_observability_backend/metrics/index.md
index 4dadd5af46f..ef629c2410e 100644
--- a/doc/architecture/blueprints/gitlab_observability_backend/metrics/index.md
+++ b/doc/architecture/blueprints/gitlab_observability_backend/metrics/index.md
@@ -52,7 +52,7 @@ The aforementioned goals can further be broken down into the following four sub-
NOTE:
Although remote_write_sender does not test the correctness of a remote write receiver itself as is our case, it does bring some inspiration to implement/develop one within the scope of this project.
-- We aim to also ensure compatibility for special Prometheus data types, e.g. Prometheus histogram(s), summary(s).
+- We aim to also ensure compatibility for special Prometheus data types, for example, Prometheus histogram(s), summary(s).
#### Reading data
@@ -82,13 +82,13 @@ Worth noting that we intend to model exemplars the same way we’re modeling met
## Proposal
-We intend to use GitLab Observability Backend as a framework for the Metrics implementation so that its lifecycle is also managed via already existing Kubernetes controllers e.g. scheduler, tenant-operator.
+We intend to use GitLab Observability Backend as a framework for the Metrics implementation so that its lifecycle is also managed via already existing Kubernetes controllers, for example, scheduler, tenant-operator.

From a development perspective, what’s been marked as our “Application Server” above needs to be developed as a part of this proposal while the remaining peripheral components either already exist or can be provisioned via existing code in `scheduler`/`tenant-operator`.
-**On the write path**, we expect to receive incoming data via `HTTP`/`gRPC` `Ingress` similar to what we do for our existing services, e.g. errortracking, tracing.
+**On the write path**, we expect to receive incoming data via `HTTP`/`gRPC` `Ingress` similar to what we do for our existing services, for example, errortracking, tracing.
NOTE:
Additionally, since we intend to ingest data via Prometheus `remote_write` API, the received data will be Protobuf-encoded, Snappy-compressed. All received data therefore needs to be decompressed & decoded to turn it into a set of `prompb.TimeSeries` objects, which the rest of our components interact with.
@@ -546,7 +546,7 @@ value: 0
On the read path, we first query all timeseries identifiers by searching for the labels under consideration. Once we have all the `series_id`(s), we then look up all corresponding samples between the query start timestamp and end timestamp.
-For e.g.
+For example:
```plaintext
kernel{service_environment=~"prod.*", measurement="boot_time"}
diff --git a/doc/architecture/blueprints/modular_monolith/bounded_contexts.md b/doc/architecture/blueprints/modular_monolith/bounded_contexts.md
index ee8678cff8d..0f71e24864e 100644
--- a/doc/architecture/blueprints/modular_monolith/bounded_contexts.md
+++ b/doc/architecture/blueprints/modular_monolith/bounded_contexts.md
@@ -51,7 +51,7 @@ These gems as still part of the monorepo.
From the research in [Proposal: split GitLab monolith into components](https://gitlab.com/gitlab-org/gitlab/-/issues/365293)
it seems that following [product categories](https://about.gitlab.com/handbook/product/categories/#hierarchy), as a guideline,
-would be much better than translating organization structure into folder structure (e.g. `app/modules/verify/pipeline-execution/...`).
+would be much better than translating organization structure into folder structure (for example, `app/modules/verify/pipeline-execution/...`).
However, this guideline alone is not sufficient and we need a more specific strategy:
@@ -78,7 +78,7 @@ Start with listing all the Ruby files in a spreadsheet and categorize them into
Some of them are already pretty explicit like Ci::, Packages::, etc. Components should follow our
[existing naming guide](../../../development/software_design.md#use-namespaces-to-define-bounded-contexts).
-This could be a short-lived Working Group with representative members of each DevOps stage (e.g. Senior+ engineers).
+This could be a short-lived Working Group with representative members of each DevOps stage (for example, Senior+ engineers).
The WG would help defining high-level components and will be the DRIs for driving the changes in their respective DevOps stage.
### 3. Publish the list of bounded contexts
@@ -107,13 +107,13 @@ With this static list we could:
## Glossary
- `modules` are Ruby modules and can be used to nest code hierarchically.
-- `namespaces` are unique hierarchies of Ruby constants. E.g. `Ci::` but also `Ci::JobArtifacts::` or `Ci::Pipeline::Chain::`.
+- `namespaces` are unique hierarchies of Ruby constants. For example, `Ci::` but also `Ci::JobArtifacts::` or `Ci::Pipeline::Chain::`.
- `packages` are Packwerk packages to group together related functionalities. These packages can be big or small depending on the design and architecture. Inside a package all constants (classes and modules) have the same namespace. For example:
- In a package `ci`, all the classes would be nested under `Ci::` namespace. There can be also nested namespaces like `Ci::PipelineProcessing::`.
- In a package `ci-pipeline_creation` all classes are nested under `Ci::PipelineCreation`, like `Ci::PipelineCreation::Chain::Command`.
- In a package `ci` a class named `MergeRequests::UpdateHeadPipelineService` would not be allowed because it would not match the package's namespace.
- This can be enforced easily with [Packwerk's based Rubocop Cops](https://github.com/rubyatscale/rubocop-packs/blob/main/lib/rubocop/cop/packs/root_namespace_is_pack_name.rb).
- `bounded context` is a top-level Packwerk package that represents a macro aspect of the domain. For example: `Ci::`, `MergeRequests::`, `Packages::`, etc.
- - A bounded context is represented by a single Ruby module/namespace. E.g. `Ci::` and not `Ci::JobArtifacts::`.
+ - A bounded context is represented by a single Ruby module/namespace. For example, `Ci::` and not `Ci::JobArtifacts::`.
- A bounded context can be made of 1 or multiple Packwerk packages. Nested packages would be recommended if the domain is quite complex and we want to enforce privacy among all the implementation details. For example: `Ci::PipelineProcessing::` and `Ci::PipelineCreation::` could be separate packages of the same bounded context and expose their public API while keeping implementation details private.
- A new bounded context like `RemoteDevelopment::` can be represented a single package while large and complex bounded contexts like `Ci::` would need to be organized into smaller/nested packages.
diff --git a/doc/architecture/blueprints/object_pools/index.md b/doc/architecture/blueprints/object_pools/index.md
index d14e11b8d36..7b7f8d7d180 100644
--- a/doc/architecture/blueprints/object_pools/index.md
+++ b/doc/architecture/blueprints/object_pools/index.md
@@ -805,10 +805,10 @@ pools as it will always match the contents of the upstream repository.
It has a number of downsides though:
-- Normal repositories can now have different states, where some of the
+- Repositories can now have different states, where some of the
repositories are allowed to prune objects and others aren't. This introduces a
source of uncertainty and makes it easy to accidentally delete objects in a
- normal repository and thus corrupt its forks.
+ repository and thus corrupt its forks.
- When upstream repositories go private we must stop updating objects which are
supposed to be deduplicated across members of the fork network. This means
diff --git a/doc/architecture/blueprints/observability_tracing/index.md b/doc/architecture/blueprints/observability_tracing/index.md
index 07c80a2b074..4291683f83f 100644
--- a/doc/architecture/blueprints/observability_tracing/index.md
+++ b/doc/architecture/blueprints/observability_tracing/index.md
@@ -81,7 +81,7 @@ One data ingestion pipeline will be deployed for each top level GitLab namespace
- Beyond rate limits, resource limits can be enforced per user such that no user can steal more system resources (memory, cpu) than allocated.
- Fine grained control of horizontal scaling for each user pipeline by adding more OTEL Collector instances
-- Manage the users tenant in accordance to GitLab subscription tier, e.g. quota, throughput, cold storage, shard to different databases
+- Manage the user's tenant in accordance with the GitLab subscription tier, for example, quota, throughput, cold storage, shard to different databases
- Reduced complexity and enhanced security in the pipeline by leveraging off the shelf components like the [OpenTelemetry Collector](https://opentelemetry.io/docs/concepts/components/#collector) where data within that collector belongs to no more than a single user/customer.
A pipeline is only deployed for the user upon enabling observability in the project settings, in the same way a user can enable error tracking for their project. When observability is enabled for any project in the users namespace, a pipeline will be deployed. This deployment is automated by our Kubernetes scheduler-operator and tenant-operator. Provisioning is currently managed through the iframe, but a preferred method would be to provision using a RESTful API. The GitLab UI would have a section in project settings that allow a user to "enable observability", much like they do for error tracking today.
diff --git a/doc/ci/runners/saas/linux_saas_runner.md b/doc/ci/runners/saas/linux_saas_runner.md
index c340a2b0f89..95917bbc300 100644
--- a/doc/ci/runners/saas/linux_saas_runner.md
+++ b/doc/ci/runners/saas/linux_saas_runner.md
@@ -81,7 +81,7 @@ If you want to [contribute to GitLab](https://about.gitlab.com/community/contrib
`gitlab-shared-runners-manager-X.gitlab.com` fleet of runners, dedicated for GitLab projects and related community forks.
These runners are backed by the same machine type as our `small` runners.
-Unlike the normal SaaS runners on Linux, each virtual machine is re-used up to 40 times.
+Unlike the most commonly used SaaS runners on Linux, each virtual machine is re-used up to 40 times.
As we want to encourage people to contribute, these runners are free of charge.
diff --git a/doc/development/auto_devops.md b/doc/development/auto_devops.md
index 1f98a37ac9d..ccbad7f7314 100644
--- a/doc/development/auto_devops.md
+++ b/doc/development/auto_devops.md
@@ -41,7 +41,7 @@ Some jobs use images that are built from external projects:
[`auto-deploy-app`](https://gitlab.com/gitlab-org/cluster-integration/auto-deploy-image/-/tree/master/assets/auto-deploy-app) is used to deploy.
There are extra variables that get passed to the CI jobs when Auto
-DevOps is enabled that are not present in a normal CI job. These can be
+DevOps is enabled that are not present in a typical CI job. These can be
found in
[`ProjectAutoDevops`](https://gitlab.com/gitlab-org/gitlab/-/blob/bf69484afa94e091c3e1383945f60dbe4e8681af/app/models/project_auto_devops.rb).
diff --git a/doc/development/database/efficient_in_operator_queries.md b/doc/development/database/efficient_in_operator_queries.md
index 03a1c442255..a0c71f49e2d 100644
--- a/doc/development/database/efficient_in_operator_queries.md
+++ b/doc/development/database/efficient_in_operator_queries.md
@@ -117,7 +117,7 @@ On average, we can say the following:
From the list, it's apparent that the number of `issues` records has
the largest impact on the performance.
-As per normal usage, we can say that the number of issue records grows
+In typical usage, the number of issue records grows
at a faster rate than the `namespaces` and the `projects` records.
This problem affects most of our group-level features where records are listed
diff --git a/doc/development/database/keyset_pagination.md b/doc/development/database/keyset_pagination.md
index 42d7458b45a..ff8038ee24c 100644
--- a/doc/development/database/keyset_pagination.md
+++ b/doc/development/database/keyset_pagination.md
@@ -159,7 +159,7 @@ configuration is necessary:
- Function-based ordering.
- Ordering with a custom tie-breaker column, like `iid`.
-These order objects can be defined in the model classes as normal ActiveRecord scopes, there is no special behavior that prevents using these scopes elsewhere (Kaminari, background jobs).
+These order objects can be defined in the model classes as standard ActiveRecord scopes; there is no special behavior that prevents using these scopes elsewhere (Kaminari, background jobs).
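As a minimal sketch (model and scope names are hypothetical), such an order object is just an ordinary scope with a unique tie-breaker column, so it can be reused by Kaminari or background jobs without special handling. In practice the scope body would build the keyset order object described in this guide; for brevity the sketch uses a plain `order`:

```ruby
class Issue < ApplicationRecord
  # Ordinary ActiveRecord scope; the unique id column acts as the tie-breaker,
  # which is what makes the ordering suitable for keyset pagination.
  scope :order_by_created_at_and_id, -> { order(created_at: :asc, id: :asc) }
end

# Reused outside of keyset pagination without any special handling:
Issue.order_by_created_at_and_id.limit(20)
```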
### `NULLS LAST` ordering
diff --git a/doc/development/database/loose_foreign_keys.md b/doc/development/database/loose_foreign_keys.md
index 91a22d8c26b..fd380bee385 100644
--- a/doc/development/database/loose_foreign_keys.md
+++ b/doc/development/database/loose_foreign_keys.md
@@ -370,7 +370,7 @@ end
```
This endpoint still works when the parent `Project` model is deleted. This can be considered a
-a data leak which should not happen under normal circumstances:
+data leak which should not happen under typical circumstances:
```ruby
def show
@@ -723,7 +723,7 @@ timeout or a worker crash, the next job continues the processing.
### Accumulation of deleted records
There can be cases where the workers need to process an unusually large amount of data. This can
-happen under normal usage, for example when a large project or group is deleted. In this scenario,
+happen under typical usage, for example when a large project or group is deleted. In this scenario,
there can be several million rows to be deleted or nullified. Due to the limits enforced by the
worker, processing this data takes some time.
diff --git a/doc/development/database/pagination_guidelines.md b/doc/development/database/pagination_guidelines.md
index 04a2a8cdf9c..d6550d0a515 100644
--- a/doc/development/database/pagination_guidelines.md
+++ b/doc/development/database/pagination_guidelines.md
@@ -214,7 +214,7 @@ Limit (cost=137878.89..137881.65 rows=20 width=1309) (actual time=5523.588..552
(8 rows)
```
-We can argue that a normal user does not visit these pages, however, API users could easily navigate to very high page numbers (scraping, collecting data).
+We can argue that a typical user does not visit these pages; however, API users could easily navigate to very high page numbers (scraping, collecting data).
### Keyset pagination
diff --git a/doc/development/fe_guide/frontend_faq.md b/doc/development/fe_guide/frontend_faq.md
index 995730796b4..ab75cc27b6a 100644
--- a/doc/development/fe_guide/frontend_faq.md
+++ b/doc/development/fe_guide/frontend_faq.md
@@ -163,7 +163,7 @@ Sometimes it's necessary to test locally what the frontend production build woul
The production build takes a few minutes to complete. Any code changes at this point are
displayed only after executing item 3 above again.
-To return to the normal development mode:
+To return to the standard development mode:
1. Open `gitlab.yaml` located in your `gitlab` installation folder, scroll down to the `webpack` section and change back `dev_server` to `enabled: true`.
1. Run `yarn clean` to remove the production assets and free some space (optional).
diff --git a/doc/development/pipelines/internals.md b/doc/development/pipelines/internals.md
index 53f07d48eef..83ab63e5812 100644
--- a/doc/development/pipelines/internals.md
+++ b/doc/development/pipelines/internals.md
@@ -20,7 +20,8 @@ Pipelines are always created for the following scenarios:
Pipeline creation is also affected by the following CI/CD variables:
-- If `$FORCE_GITLAB_CI` is set, pipelines are created.
+- If `$FORCE_GITLAB_CI` is set, pipelines are created. Using this variable is not recommended.
+ See [Avoid `$FORCE_GITLAB_CI`](#avoid-force_gitlab_ci).
- If `$GITLAB_INTERNAL` is not set, pipelines are not created.
No pipeline is created in any other case (for example, when pushing a branch with no
@@ -28,6 +29,24 @@ MR for it).
The source of truth for these workflow rules is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
+### Avoid `$FORCE_GITLAB_CI`
+
+The pipeline is very complex, and we need to clearly understand the kind of
+pipeline we want to trigger. We need to know which jobs should run and
+which ones should not.
+
+If we use `$FORCE_GITLAB_CI` to force-trigger a pipeline,
+we don't really know what kind of pipeline it is. As a result, we might not
+run the jobs we need, or we might run many jobs we don't care about.
+
+For more context and background, see:
+[Avoid blanket changes to avoid unexpected run](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102881)
+
+Here's a list of the places where we currently use `$FORCE_GITLAB_CI` and should
+try to move away from it:
+
+- [JiHu validation pipeline](https://about.gitlab.com/handbook/ceo/chief-of-staff-team/jihu-support/jihu-validation-pipelines.html)
+
## Default image
The default image is defined in [`.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab-ci.yml).
diff --git a/doc/user/clusters/agent/troubleshooting.md b/doc/user/clusters/agent/troubleshooting.md
index 9c0733d66b7..f1e85a70fb3 100644
--- a/doc/user/clusters/agent/troubleshooting.md
+++ b/doc/user/clusters/agent/troubleshooting.md
@@ -11,7 +11,7 @@ When you are using the GitLab agent for Kubernetes, you might experience issues
You can start by viewing the service logs:
```shell
-kubectl logs -f -l=app=gitlab-agent -n gitlab-agent
+kubectl logs -f -l=app.kubernetes.io/name=gitlab-agent -n gitlab-agent
```
If you are a GitLab administrator, you can also view the [GitLab agent server logs](../../../administration/clusters/kas.md#troubleshooting).
diff --git a/doc/user/usage_quotas.md b/doc/user/usage_quotas.md
index 5c6c64a3485..d119044930a 100644
--- a/doc/user/usage_quotas.md
+++ b/doc/user/usage_quotas.md
@@ -181,7 +181,7 @@ Storage types that add to the total namespace storage are:
- Git repository
- Git LFS
-- Artifacts
+- Job artifacts
- Container registry
- Package registry
- Dependency proxy
diff --git a/gems/gitlab-schema-validation/.gitignore b/gems/gitlab-schema-validation/.gitignore
new file mode 100644
index 00000000000..b04a8c840df
--- /dev/null
+++ b/gems/gitlab-schema-validation/.gitignore
@@ -0,0 +1,11 @@
+/.bundle/
+/.yardoc
+/_yardoc/
+/coverage/
+/doc/
+/pkg/
+/spec/reports/
+/tmp/
+
+# rspec failure tracking
+.rspec_status
diff --git a/gems/gitlab-schema-validation/.gitlab-ci.yml b/gems/gitlab-schema-validation/.gitlab-ci.yml
new file mode 100644
index 00000000000..03db9e02b30
--- /dev/null
+++ b/gems/gitlab-schema-validation/.gitlab-ci.yml
@@ -0,0 +1,4 @@
+include:
+ - local: gems/gem.gitlab-ci.yml
+ inputs:
+ gem_name: "gitlab-schema-validation"
diff --git a/gems/gitlab-schema-validation/.rspec b/gems/gitlab-schema-validation/.rspec
new file mode 100644
index 00000000000..34c5164d9b5
--- /dev/null
+++ b/gems/gitlab-schema-validation/.rspec
@@ -0,0 +1,3 @@
+--format documentation
+--color
+--require spec_helper
diff --git a/gems/gitlab-schema-validation/.rubocop.yml b/gems/gitlab-schema-validation/.rubocop.yml
new file mode 100644
index 00000000000..6b3942f803e
--- /dev/null
+++ b/gems/gitlab-schema-validation/.rubocop.yml
@@ -0,0 +1,5 @@
+inherit_from:
+ - ../config/rubocop.yml
+
+AllCops:
+ NewCops: enable
diff --git a/gems/gitlab-schema-validation/Gemfile b/gems/gitlab-schema-validation/Gemfile
new file mode 100644
index 00000000000..3fa25adbee1
--- /dev/null
+++ b/gems/gitlab-schema-validation/Gemfile
@@ -0,0 +1,6 @@
+# frozen_string_literal: true
+
+source "https://rubygems.org"
+
+# Specify your gem's dependencies in gitlab-schema-validation.gemspec
+gemspec
diff --git a/gems/gitlab-schema-validation/Gemfile.lock b/gems/gitlab-schema-validation/Gemfile.lock
new file mode 100644
index 00000000000..70a69081f66
--- /dev/null
+++ b/gems/gitlab-schema-validation/Gemfile.lock
@@ -0,0 +1,126 @@
+PATH
+ remote: .
+ specs:
+ gitlab-schema-validation (0.1.0)
+
+GEM
+ remote: https://rubygems.org/
+ specs:
+ activesupport (7.0.6)
+ concurrent-ruby (~> 1.0, >= 1.0.2)
+ i18n (>= 1.6, < 2)
+ minitest (>= 5.1)
+ tzinfo (~> 2.0)
+ ast (2.4.2)
+ benchmark-malloc (0.2.0)
+ benchmark-perf (0.6.0)
+ benchmark-trend (0.4.0)
+ binding_of_caller (1.0.0)
+ debug_inspector (>= 0.0.1)
+ coderay (1.1.3)
+ concurrent-ruby (1.2.2)
+ debug_inspector (1.1.0)
+ diff-lcs (1.5.0)
+ gitlab-styles (10.1.0)
+ rubocop (~> 1.50.2)
+ rubocop-graphql (~> 0.18)
+ rubocop-performance (~> 1.15)
+ rubocop-rails (~> 2.17)
+ rubocop-rspec (~> 2.22)
+ i18n (1.14.1)
+ concurrent-ruby (~> 1.0)
+ json (2.6.3)
+ minitest (5.18.1)
+ parallel (1.23.0)
+ parser (3.2.2.3)
+ ast (~> 2.4.1)
+ racc
+ proc_to_ast (0.1.0)
+ coderay
+ parser
+ unparser
+ racc (1.7.1)
+ rack (3.0.8)
+ rainbow (3.1.1)
+ regexp_parser (2.8.1)
+ rexml (3.2.5)
+ rspec (3.12.0)
+ rspec-core (~> 3.12.0)
+ rspec-expectations (~> 3.12.0)
+ rspec-mocks (~> 3.12.0)
+ rspec-benchmark (0.6.0)
+ benchmark-malloc (~> 0.2)
+ benchmark-perf (~> 0.6)
+ benchmark-trend (~> 0.4)
+ rspec (>= 3.0)
+ rspec-core (3.12.2)
+ rspec-support (~> 3.12.0)
+ rspec-expectations (3.12.3)
+ diff-lcs (>= 1.2.0, < 2.0)
+ rspec-support (~> 3.12.0)
+ rspec-mocks (3.12.5)
+ diff-lcs (>= 1.2.0, < 2.0)
+ rspec-support (~> 3.12.0)
+ rspec-parameterized (1.0.0)
+ rspec-parameterized-core (< 2)
+ rspec-parameterized-table_syntax (< 2)
+ rspec-parameterized-core (1.0.0)
+ parser
+ proc_to_ast
+ rspec (>= 2.13, < 4)
+ unparser
+ rspec-parameterized-table_syntax (1.0.0)
+ binding_of_caller
+ rspec-parameterized-core (< 2)
+ rspec-support (3.12.1)
+ rubocop (1.50.2)
+ json (~> 2.3)
+ parallel (~> 1.10)
+ parser (>= 3.2.0.0)
+ rainbow (>= 2.2.2, < 4.0)
+ regexp_parser (>= 1.8, < 3.0)
+ rexml (>= 3.2.5, < 4.0)
+ rubocop-ast (>= 1.28.0, < 2.0)
+ ruby-progressbar (~> 1.7)
+ unicode-display_width (>= 2.4.0, < 3.0)
+ rubocop-ast (1.29.0)
+ parser (>= 3.2.1.0)
+ rubocop-capybara (2.18.0)
+ rubocop (~> 1.41)
+ rubocop-factory_bot (2.23.1)
+ rubocop (~> 1.33)
+ rubocop-graphql (0.19.0)
+ rubocop (>= 0.87, < 2)
+ rubocop-performance (1.18.0)
+ rubocop (>= 1.7.0, < 2.0)
+ rubocop-ast (>= 0.4.0)
+ rubocop-rails (2.20.2)
+ activesupport (>= 4.2.0)
+ rack (>= 1.1)
+ rubocop (>= 1.33.0, < 2.0)
+ rubocop-rspec (2.22.0)
+ rubocop (~> 1.33)
+ rubocop-capybara (~> 2.17)
+ rubocop-factory_bot (~> 2.22)
+ ruby-progressbar (1.13.0)
+ tzinfo (2.0.6)
+ concurrent-ruby (~> 1.0)
+ unicode-display_width (2.4.2)
+ unparser (0.6.8)
+ diff-lcs (~> 1.3)
+ parser (>= 3.2.0)
+
+PLATFORMS
+ ruby
+
+DEPENDENCIES
+ gitlab-schema-validation!
+ gitlab-styles (~> 10.1.0)
+ rspec (~> 3.0)
+ rspec-benchmark (~> 0.6.0)
+ rspec-parameterized (~> 1.0)
+ rubocop (~> 1.50)
+ rubocop-rspec (~> 2.22)
+
+BUNDLED WITH
+ 2.4.14
diff --git a/gems/gitlab-schema-validation/gitlab-schema-validation.gemspec b/gems/gitlab-schema-validation/gitlab-schema-validation.gemspec
new file mode 100644
index 00000000000..a3007580d84
--- /dev/null
+++ b/gems/gitlab-schema-validation/gitlab-schema-validation.gemspec
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require_relative "lib/gitlab/schema/validation/version"
+
+Gem::Specification.new do |spec|
+ spec.name = "gitlab-schema-validation"
+ spec.version = Gitlab::Schema::Validation::Version::VERSION
+ spec.authors = ["group::database"]
+ spec.email = ["engineering@gitlab.com"]
+
+ spec.summary = "Schema validation framework"
+ spec.description = "Compares the differences between a structure.sql file and a database
+ and reports the inconsistencies."
+ spec.homepage = "https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-schema-validation"
+ spec.license = "MIT"
+ spec.required_ruby_version = ">= 3.0"
+ spec.metadata["rubygems_mfa_required"] = "true"
+
+ spec.files = Dir['lib/**/*.rb']
+ spec.require_paths = ["lib"]
+
+ spec.add_development_dependency "gitlab-styles", "~> 10.1.0"
+ spec.add_development_dependency "rspec", "~> 3.0"
+ spec.add_development_dependency "rspec-benchmark", "~> 0.6.0"
+ spec.add_development_dependency "rspec-parameterized", "~> 1.0"
+ spec.add_development_dependency "rubocop", "~> 1.50"
+ spec.add_development_dependency "rubocop-rspec", "~> 2.22"
+end
diff --git a/gems/gitlab-schema-validation/lib/gitlab/schema/validation.rb b/gems/gitlab-schema-validation/lib/gitlab/schema/validation.rb
new file mode 100644
index 00000000000..41e88f21d67
--- /dev/null
+++ b/gems/gitlab-schema-validation/lib/gitlab/schema/validation.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require_relative "validation/version"
+
+module Gitlab
+ module Schema
+ module Validation
+ end
+ end
+end
diff --git a/gems/gitlab-schema-validation/lib/gitlab/schema/validation/version.rb b/gems/gitlab-schema-validation/lib/gitlab/schema/validation/version.rb
new file mode 100644
index 00000000000..40220578c97
--- /dev/null
+++ b/gems/gitlab-schema-validation/lib/gitlab/schema/validation/version.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Schema
+ module Validation
+ module Version
+ VERSION = "0.1.0"
+ end
+ end
+ end
+end
diff --git a/gems/gitlab-schema-validation/spec/gitlab/schema/validation_spec.rb b/gems/gitlab-schema-validation/spec/gitlab/schema/validation_spec.rb
new file mode 100644
index 00000000000..f4a06abab48
--- /dev/null
+++ b/gems/gitlab-schema-validation/spec/gitlab/schema/validation_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+RSpec.describe Gitlab::Schema::Validation do
+ it "has a version number" do
+ expect(Gitlab::Schema::Validation::Version::VERSION).not_to be_nil
+ end
+end
diff --git a/gems/gitlab-schema-validation/spec/spec_helper.rb b/gems/gitlab-schema-validation/spec/spec_helper.rb
new file mode 100644
index 00000000000..c2def02dd79
--- /dev/null
+++ b/gems/gitlab-schema-validation/spec/spec_helper.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require "gitlab/schema/validation"
+
+RSpec.configure do |config|
+ # Enable flags like --only-failures and --next-failure
+ config.example_status_persistence_file_path = ".rspec_status"
+
+ # Disable RSpec exposing methods globally on `Module` and `main`
+ config.disable_monkey_patching!
+
+ config.expect_with :rspec do |c|
+ c.syntax = :expect
+ end
+end
diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb
index 33557c6e236..c10f780665c 100644
--- a/lib/gitlab/gitaly_client/commit_service.rb
+++ b/lib/gitlab/gitaly_client/commit_service.rb
@@ -531,14 +531,24 @@ module Gitlab
request = Gitaly::GetCommitSignaturesRequest.new(repository: @gitaly_repo, commit_ids: commit_ids)
response = gitaly_client_call(@repository.storage, :commit_service, :get_commit_signatures, request, timeout: GitalyClient.fast_timeout)
- signatures = Hash.new { |h, k| h[k] = [+''.b, +''.b] }
+ signatures = Hash.new do |h, k|
+ h[k] = {
+ signature: +''.b,
+ signed_text: +''.b,
+ signer: :SIGNER_UNSPECIFIED
+ }
+ end
+
current_commit_id = nil
response.each do |message|
current_commit_id = message.commit_id if message.commit_id.present?
- signatures[current_commit_id].first << message.signature
- signatures[current_commit_id].last << message.signed_text
+ signatures[current_commit_id][:signature] << message.signature
+ signatures[current_commit_id][:signed_text] << message.signed_text
+
+          # The actual value is sent once. All the other chunks send SIGNER_UNSPECIFIED.
+ signatures[current_commit_id][:signer] = message.signer unless message.signer == :SIGNER_UNSPECIFIED
end
signatures
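For context on the shape change above: callers now receive a hash per commit instead of a `[signature, signed_text]` pair. A hedged consumption sketch, where the `client` variable and commit IDs are placeholders:

```ruby
# Placeholder commit IDs; `client` is assumed to be a CommitService instance.
signatures = client.get_commit_signatures(%w[deadbeef cafebabe])

signatures.each do |commit_id, data|
  # :signature and :signed_text are accumulated across streamed chunks;
  # :signer is :SIGNER_SYSTEM when Gitaly itself signed the commit.
  system_signed = data[:signer] == :SIGNER_SYSTEM
  puts "#{commit_id}: #{data[:signature].bytesize} bytes (system-signed: #{system_signed})"
end
```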
diff --git a/lib/gitlab/gpg/commit.rb b/lib/gitlab/gpg/commit.rb
index a03aeb9c293..1fc95181767 100644
--- a/lib/gitlab/gpg/commit.rb
+++ b/lib/gitlab/gpg/commit.rb
@@ -87,6 +87,7 @@ module Gitlab
end
def verification_status(gpg_key)
+ return :verified_system if verified_by_gitlab?
return :multiple_signatures if multiple_signatures?
return :unknown_key unless gpg_key
return :unverified_key unless gpg_key.verified?
@@ -101,6 +102,15 @@ module Gitlab
end
end
+      # If a commit is signed by Gitaly, Gitaly returns `SIGNER_SYSTEM` as the signer.
+      # To determine this, the signature is verified using Gitaly's public key:
+      # https://gitlab.com/gitlab-org/gitaly/-/blob/v16.2.0-rc2/internal/gitaly/service/commit/commit_signatures.go#L63
+      #
+      # It is safe to skip the verification step if the commit has been signed by Gitaly.
+ def verified_by_gitlab?
+ signer == :SIGNER_SYSTEM
+ end
+
def user_infos(gpg_key)
gpg_key&.verified_user_infos&.first || gpg_key&.user_infos&.first || {}
end
diff --git a/lib/gitlab/signed_commit.rb b/lib/gitlab/signed_commit.rb
index 410e71f51a1..be6592dd231 100644
--- a/lib/gitlab/signed_commit.rb
+++ b/lib/gitlab/signed_commit.rb
@@ -34,13 +34,19 @@ module Gitlab
def signature_text
strong_memoize(:signature_text) do
- @signature_data.itself ? @signature_data[0] : nil
+ @signature_data.itself ? @signature_data[:signature] : nil
end
end
def signed_text
strong_memoize(:signed_text) do
- @signature_data.itself ? @signature_data[1] : nil
+ @signature_data.itself ? @signature_data[:signed_text] : nil
+ end
+ end
+
+ def signer
+ strong_memoize(:signer) do
+ @signature_data.itself ? @signature_data[:signer] : nil
end
end
diff --git a/lib/gitlab/ssh/commit.rb b/lib/gitlab/ssh/commit.rb
index d9ac8c1b881..7d7cc529b1a 100644
--- a/lib/gitlab/ssh/commit.rb
+++ b/lib/gitlab/ssh/commit.rb
@@ -10,7 +10,7 @@ module Gitlab
end
def attributes
- signature = ::Gitlab::Ssh::Signature.new(signature_text, signed_text, @commit.committer_email)
+ signature = ::Gitlab::Ssh::Signature.new(signature_text, signed_text, signer, @commit.committer_email)
{
commit_sha: @commit.sha,
diff --git a/lib/gitlab/ssh/signature.rb b/lib/gitlab/ssh/signature.rb
index 763d89116f1..6b0cab75557 100644
--- a/lib/gitlab/ssh/signature.rb
+++ b/lib/gitlab/ssh/signature.rb
@@ -11,15 +11,17 @@ module Gitlab
GIT_NAMESPACE = 'git'
- def initialize(signature_text, signed_text, committer_email)
+ def initialize(signature_text, signed_text, signer, committer_email)
@signature_text = signature_text
@signed_text = signed_text
+ @signer = signer
@committer_email = committer_email
end
def verification_status
strong_memoize(:verification_status) do
next :unverified unless all_attributes_present?
+ next :verified_system if verified_by_gitlab?
next :unverified unless valid_signature_blob?
next :unknown_key unless signed_by_key
next :other_user unless committer
@@ -81,6 +83,15 @@ module Gitlab
nil
end
end
+
+      # If a commit is signed by Gitaly, Gitaly returns `SIGNER_SYSTEM` as the signer.
+      # To determine this, the signature is verified using Gitaly's public key:
+      # https://gitlab.com/gitlab-org/gitaly/-/blob/v16.2.0-rc2/internal/gitaly/service/commit/commit_signatures.go#L63
+      #
+      # It is safe to skip the verification step if the commit has been signed by Gitaly.
+ def verified_by_gitlab?
+ @signer == :SIGNER_SYSTEM
+ end
end
end
end
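Given the constructor and `verification_status` changes above, a signer of `:SIGNER_SYSTEM` short-circuits to `:verified_system` before the signature blob is validated or a key is looked up. A rough usage sketch with placeholder values:

```ruby
# Placeholder inputs; only the signer symbol matters for this path.
signature = Gitlab::Ssh::Signature.new(
  signature_text,          # raw SSH signature blob
  signed_text,             # payload that was signed
  :SIGNER_SYSTEM,          # reported by Gitaly for commits it signed itself
  'committer@example.com'
)

signature.verification_status # => :verified_system
```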
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 47d97df25db..d4f3c649df6 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -12623,15 +12623,36 @@ msgstr ""
msgid "ContributionEvent|Approved merge request %{targetLink} in %{resourceParentLink}."
msgstr ""
+msgid "ContributionEvent|Deleted branch %{refLink} in %{resourceParentLink}."
+msgstr ""
+
+msgid "ContributionEvent|Deleted tag %{refLink} in %{resourceParentLink}."
+msgstr ""
+
msgid "ContributionEvent|Joined project %{resourceParentLink}."
msgstr ""
msgid "ContributionEvent|Left project %{resourceParentLink}."
msgstr ""
+msgid "ContributionEvent|Pushed a new branch %{refLink} in %{resourceParentLink}."
+msgstr ""
+
+msgid "ContributionEvent|Pushed a new tag %{refLink} in %{resourceParentLink}."
+msgstr ""
+
+msgid "ContributionEvent|Pushed to branch %{refLink} in %{resourceParentLink}."
+msgstr ""
+
+msgid "ContributionEvent|Pushed to tag %{refLink} in %{resourceParentLink}."
+msgstr ""
+
msgid "ContributionEvent|Removed due to membership expiration from %{resourceParentLink}."
msgstr ""
+msgid "ContributionEvent|…and %{count} more commits. %{linkStart}Compare%{linkEnd}."
+msgstr ""
+
msgid "Contributions for %{calendar_date}"
msgstr ""
@@ -19788,6 +19809,9 @@ msgstr ""
msgid "Generate project access tokens scoped to this project for your applications that need access to the GitLab API."
msgstr ""
+msgid "Generate root cause analysis"
+msgstr ""
+
msgid "Generate site and private keys at"
msgstr ""
@@ -26201,6 +26225,9 @@ msgstr ""
msgid "Jobs|Raw text search is not currently supported for the jobs filtered search feature. Please use the available search tokens."
msgstr ""
+msgid "Jobs|Root cause analysis"
+msgstr ""
+
msgid "Jobs|There was a problem fetching the failed jobs."
msgstr ""
@@ -29368,6 +29395,9 @@ msgstr ""
msgid "MlExperimentTracking|Model experiments"
msgstr ""
+msgid "MlExperimentTracking|Model registry"
+msgstr ""
+
msgid "MlExperimentTracking|Name"
msgstr ""
@@ -29404,6 +29434,9 @@ msgstr ""
msgid "Model experiments"
msgstr ""
+msgid "ModelRegistry|Model registry"
+msgstr ""
+
msgid "Modified"
msgstr ""
@@ -39160,6 +39193,12 @@ msgstr ""
msgid "Rollout of free user limits within GitLab.com. Do not edit these values unless approval has been given via %{link_start}this issue%{link_end}."
msgstr ""
+msgid "Root cause analysis"
+msgstr ""
+
+msgid "Root cause analysis is a feature that analyzes your logs to determine why a job may have failed and the potential ways to fix it. To generate this analysis, we may share information in your job logs with %{linkStart}Third-Party AI providers%{linkEnd}. Before initiating this analysis, please do not include in your logs any information that could impact the security or privacy of your account."
+msgstr ""
+
msgid "Ruby"
msgstr ""
@@ -46640,6 +46679,9 @@ msgstr ""
msgid "This commit is part of merge request %{link_to_merge_request}. Comments created here will be created in the context of that merge request."
msgstr ""
+msgid "This commit was created in the GitLab UI, and signed with a GitLab-verified signature."
+msgstr ""
+
msgid "This commit was signed with a %{strong_open}verified%{strong_close} signature and the committer email is verified to belong to the same user."
msgstr ""
@@ -49092,6 +49134,9 @@ msgstr ""
msgid "UsageQuota|Pipelines"
msgstr ""
+msgid "UsageQuota|Project storage included in %{planName} subscription"
+msgstr ""
+
msgid "UsageQuota|Recalculate repository usage"
msgstr ""
@@ -51300,6 +51345,9 @@ msgstr ""
msgid "What is repository mirroring?"
msgstr ""
+msgid "What is root cause analysis?"
+msgstr ""
+
msgid "What is squashing?"
msgstr ""
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index b22b64bcf82..8235c3e4194 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -836,6 +836,7 @@ export const mockTokens = (fetchLabels, fetchUsers, fetchMilestones, isSignedIn)
type: TOKEN_TYPE_ASSIGNEE,
operators: OPERATORS_IS_NOT,
token: UserToken,
+ dataType: 'user',
unique: true,
fetchUsers,
preloadedUsers: [],
@@ -847,6 +848,7 @@ export const mockTokens = (fetchLabels, fetchUsers, fetchMilestones, isSignedIn)
operators: OPERATORS_IS_NOT,
symbol: '@',
token: UserToken,
+ dataType: 'user',
unique: true,
fetchUsers,
preloadedUsers: [],
diff --git a/spec/frontend/contribution_events/components/contribution_event/contribution_event_pushed_spec.js b/spec/frontend/contribution_events/components/contribution_event/contribution_event_pushed_spec.js
new file mode 100644
index 00000000000..43f201040e3
--- /dev/null
+++ b/spec/frontend/contribution_events/components/contribution_event/contribution_event_pushed_spec.js
@@ -0,0 +1,141 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import ContributionEventPushed from '~/contribution_events/components/contribution_event/contribution_event_pushed.vue';
+import ContributionEventBase from '~/contribution_events/components/contribution_event/contribution_event_base.vue';
+import ResourceParentLink from '~/contribution_events/components/resource_parent_link.vue';
+import {
+ eventPushedNewBranch,
+ eventPushedNewTag,
+ eventPushedBranch,
+ eventPushedTag,
+ eventPushedRemovedBranch,
+ eventPushedRemovedTag,
+ eventBulkPushedBranch,
+} from '../../utils';
+
+describe('ContributionEventPushed', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData }) => {
+ wrapper = mountExtended(ContributionEventPushed, {
+ propsData,
+ });
+ };
+
+ describe.each`
+ event | expectedMessage | expectedIcon
+ ${eventPushedNewBranch()} | ${'Pushed a new branch'} | ${'commit'}
+ ${eventPushedNewTag()} | ${'Pushed a new tag'} | ${'commit'}
+ ${eventPushedBranch()} | ${'Pushed to branch'} | ${'commit'}
+ ${eventPushedTag()} | ${'Pushed to tag'} | ${'commit'}
+ ${eventPushedRemovedBranch()} | ${'Deleted branch'} | ${'remove'}
+ ${eventPushedRemovedTag()} | ${'Deleted tag'} | ${'remove'}
+ `('when event is $event', ({ event, expectedMessage, expectedIcon }) => {
+ beforeEach(() => {
+ createComponent({ propsData: { event } });
+ });
+
+ it('renders `ContributionEventBase` with correct props', () => {
+ expect(wrapper.findComponent(ContributionEventBase).props()).toMatchObject({
+ event,
+ iconName: expectedIcon,
+ });
+ });
+
+ it('renders message', () => {
+ expect(wrapper.findByTestId('event-body').text()).toContain(expectedMessage);
+ });
+
+ it('renders resource parent link', () => {
+ expect(wrapper.findComponent(ResourceParentLink).props('event')).toEqual(event);
+ });
+ });
+
+ describe('when ref has a path', () => {
+ const event = eventPushedNewBranch();
+ const path = '/foo';
+
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ event: {
+ ...event,
+ ref: {
+ ...event.ref,
+ path,
+ },
+ },
+ },
+ });
+ });
+
+ it('renders ref link', () => {
+ expect(wrapper.findByRole('link', { name: event.ref.name }).attributes('href')).toBe(path);
+ });
+ });
+
+ describe('when ref does not have a path', () => {
+ const event = eventPushedRemovedBranch();
+
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ event,
+ },
+ });
+ });
+
+ it('renders ref name without a link', () => {
+ expect(wrapper.findByRole('link', { name: event.ref.name }).exists()).toBe(false);
+ expect(wrapper.findByText(event.ref.name).exists()).toBe(true);
+ });
+ });
+
+  it('renders a link to the commit', () => {
+ const event = eventPushedNewBranch();
+ createComponent({
+ propsData: {
+ event,
+ },
+ });
+
+ expect(
+ wrapper.findByRole('link', { name: event.commit.truncated_sha }).attributes('href'),
+ ).toBe(event.commit.path);
+ });
+
+ it('renders commit title', () => {
+ const event = eventPushedNewBranch();
+ createComponent({
+ propsData: {
+ event,
+ },
+ });
+
+ expect(wrapper.findByText(event.commit.title).exists()).toBe(true);
+ });
+
+ describe('when multiple commits are pushed', () => {
+ const event = eventBulkPushedBranch();
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ event,
+ },
+ });
+ });
+
+ it('renders message', () => {
+ expect(wrapper.text()).toContain('…and 4 more commits.');
+ });
+
+ it('renders compare link', () => {
+ expect(
+ wrapper
+ .findByRole('link', {
+ name: `Compare ${event.commit.from_truncated_sha}…${event.commit.to_truncated_sha}`,
+ })
+ .attributes('href'),
+ ).toBe(event.commit.compare_path);
+ });
+ });
+});
diff --git a/spec/frontend/contribution_events/components/contribution_events_spec.js b/spec/frontend/contribution_events/components/contribution_events_spec.js
index 064799d4a82..a1d2570f2f6 100644
--- a/spec/frontend/contribution_events/components/contribution_events_spec.js
+++ b/spec/frontend/contribution_events/components/contribution_events_spec.js
@@ -1,11 +1,11 @@
-import events from 'test_fixtures/controller/users/activity.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ContributionEvents from '~/contribution_events/components/contribution_events.vue';
import ContributionEventApproved from '~/contribution_events/components/contribution_event/contribution_event_approved.vue';
import ContributionEventExpired from '~/contribution_events/components/contribution_event/contribution_event_expired.vue';
import ContributionEventJoined from '~/contribution_events/components/contribution_event/contribution_event_joined.vue';
import ContributionEventLeft from '~/contribution_events/components/contribution_event/contribution_event_left.vue';
-import { eventApproved, eventExpired, eventJoined, eventLeft } from '../utils';
+import ContributionEventPushed from '~/contribution_events/components/contribution_event/contribution_event_pushed.vue';
+import { eventApproved, eventExpired, eventJoined, eventLeft, eventPushedBranch } from '../utils';
describe('ContributionEvents', () => {
let wrapper;
@@ -13,7 +13,7 @@ describe('ContributionEvents', () => {
const createComponent = () => {
wrapper = shallowMountExtended(ContributionEvents, {
propsData: {
- events,
+ events: [eventApproved(), eventExpired(), eventJoined(), eventLeft(), eventPushedBranch()],
},
});
};
@@ -24,6 +24,7 @@ describe('ContributionEvents', () => {
${ContributionEventExpired} | ${eventExpired()}
${ContributionEventJoined} | ${eventJoined()}
${ContributionEventLeft} | ${eventLeft()}
+ ${ContributionEventPushed} | ${eventPushedBranch()}
`(
'renders `$expectedComponent.name` component and passes expected event',
({ expectedComponent, expectedEvent }) => {
diff --git a/spec/frontend/contribution_events/utils.js b/spec/frontend/contribution_events/utils.js
index 736479fce04..c9ef2ff2c3e 100644
--- a/spec/frontend/contribution_events/utils.js
+++ b/spec/frontend/contribution_events/utils.js
@@ -4,6 +4,9 @@ import {
EVENT_TYPE_EXPIRED,
EVENT_TYPE_JOINED,
EVENT_TYPE_LEFT,
+ EVENT_TYPE_PUSHED,
+ PUSH_EVENT_REF_TYPE_BRANCH,
+ PUSH_EVENT_REF_TYPE_TAG,
} from '~/contribution_events/constants';
const findEventByAction = (action) => events.find((event) => event.action === action);
@@ -15,3 +18,29 @@ export const eventExpired = () => findEventByAction(EVENT_TYPE_EXPIRED);
export const eventJoined = () => findEventByAction(EVENT_TYPE_JOINED);
export const eventLeft = () => findEventByAction(EVENT_TYPE_LEFT);
+
+const findPushEvent = ({
+ isNew = false,
+ isRemoved = false,
+ refType = PUSH_EVENT_REF_TYPE_BRANCH,
+ commitCount = 1,
+} = {}) => () =>
+ events.find(
+ ({ action, ref, commit }) =>
+ action === EVENT_TYPE_PUSHED &&
+ ref.is_new === isNew &&
+ ref.is_removed === isRemoved &&
+ ref.type === refType &&
+ commit.count === commitCount,
+ );
+
+export const eventPushedNewBranch = findPushEvent({ isNew: true });
+export const eventPushedNewTag = findPushEvent({ isNew: true, refType: PUSH_EVENT_REF_TYPE_TAG });
+export const eventPushedBranch = findPushEvent();
+export const eventPushedTag = findPushEvent({ refType: PUSH_EVENT_REF_TYPE_TAG });
+export const eventPushedRemovedBranch = findPushEvent({ isRemoved: true });
+export const eventPushedRemovedTag = findPushEvent({
+ isRemoved: true,
+ refType: PUSH_EVENT_REF_TYPE_TAG,
+});
+export const eventBulkPushedBranch = findPushEvent({ commitCount: 5 });
diff --git a/spec/frontend/jobs/components/job/job_app_spec.js b/spec/frontend/jobs/components/job/job_app_spec.js
index 394fc8ad43c..c925131dd9c 100644
--- a/spec/frontend/jobs/components/job/job_app_spec.js
+++ b/spec/frontend/jobs/components/job/job_app_spec.js
@@ -9,7 +9,7 @@ import EnvironmentsBlock from '~/jobs/components/job/environments_block.vue';
import ErasedBlock from '~/jobs/components/job/erased_block.vue';
import JobApp from '~/jobs/components/job/job_app.vue';
import JobLog from '~/jobs/components/log/log.vue';
-import JobLogTopBar from '~/jobs/components/job/job_log_controllers.vue';
+import JobLogTopBar from 'ee_else_ce/jobs/components/job/job_log_controllers.vue';
import Sidebar from '~/jobs/components/job/sidebar/sidebar.vue';
import StuckBlock from '~/jobs/components/job/stuck_block.vue';
import UnmetPrerequisitesBlock from '~/jobs/components/job/unmet_prerequisites_block.vue';
diff --git a/spec/frontend/ml/model_registry/routes/models/index/components/ml_models_index_spec.js b/spec/frontend/ml/model_registry/routes/models/index/components/ml_models_index_spec.js
new file mode 100644
index 00000000000..d1715ccd8f1
--- /dev/null
+++ b/spec/frontend/ml/model_registry/routes/models/index/components/ml_models_index_spec.js
@@ -0,0 +1,39 @@
+import { GlLink } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import MlModelsIndexApp from '~/ml/model_registry/routes/models/index';
+import { TITLE_LABEL } from '~/ml/model_registry/routes/models/index/translations';
+import { mockModels } from './mock_data';
+
+let wrapper;
+const createWrapper = (models = mockModels) => {
+ wrapper = shallowMountExtended(MlModelsIndexApp, {
+ propsData: { models },
+ });
+};
+
+const findModelLink = (index) => wrapper.findAllComponents(GlLink).at(index);
+const modelLinkText = (index) => findModelLink(index).text();
+const modelLinkHref = (index) => findModelLink(index).attributes('href');
+const findTitle = () => wrapper.findByText(TITLE_LABEL);
+
+describe('MlModelsIndex', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ describe('header', () => {
+ it('displays the title', () => {
+ expect(findTitle().exists()).toBe(true);
+ });
+ });
+
+ describe('model list', () => {
+ it('displays the models', () => {
+ expect(modelLinkHref(0)).toBe(mockModels[0].path);
+ expect(modelLinkText(0)).toBe(`${mockModels[0].name} / ${mockModels[0].version}`);
+
+ expect(modelLinkHref(1)).toBe(mockModels[1].path);
+ expect(modelLinkText(1)).toBe(`${mockModels[1].name} / ${mockModels[1].version}`);
+ });
+ });
+});
diff --git a/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js b/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js
new file mode 100644
index 00000000000..b8a999abbbd
--- /dev/null
+++ b/spec/frontend/ml/model_registry/routes/models/index/components/mock_data.js
@@ -0,0 +1,12 @@
+export const mockModels = [
+ {
+ name: 'model_1',
+ version: '1.0',
+ path: 'path/to/model_1',
+ },
+ {
+ name: 'model_2',
+ version: '1.0',
+ path: 'path/to/model_2',
+ },
+];
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index a27877e7ba8..e5b641c61fd 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -300,6 +300,7 @@ describe('AlertManagementEmptyState', () => {
unique: true,
symbol: '@',
token: UserToken,
+ dataType: 'user',
operators: OPERATORS_IS,
fetchPath: '/link',
fetchUsers: expect.any(Function),
@@ -311,6 +312,7 @@ describe('AlertManagementEmptyState', () => {
unique: true,
symbol: '@',
token: UserToken,
+ dataType: 'user',
operators: OPERATORS_IS,
fetchPath: '/link',
fetchUsers: expect.any(Function),
diff --git a/spec/graphql/resolvers/ci/runner_job_count_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_job_count_resolver_spec.rb
new file mode 100644
index 00000000000..6336ea883f7
--- /dev/null
+++ b/spec/graphql/resolvers/ci/runner_job_count_resolver_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::RunnerJobCountResolver, feature_category: :runner_fleet do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:irrelevant_pipeline) { create(:ci_pipeline, project: project) }
+
+ let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
+
+ let_it_be(:build_one) { create(:ci_build, :success, name: 'Build One', runner: runner, pipeline: pipeline) }
+ let_it_be(:build_two) { create(:ci_build, :success, name: 'Build Two', runner: runner, pipeline: pipeline) }
+ let_it_be(:build_three) { create(:ci_build, :failed, name: 'Build Three', runner: runner, pipeline: pipeline) }
+ let_it_be(:irrelevant_build) { create(:ci_build, name: 'Irrelevant Build', pipeline: irrelevant_pipeline) }
+
+ describe '#resolve' do
+ subject(:job_count) { resolve_job_count(args) }
+
+ let(:args) { {} }
+
+ context 'with authorized user', :enable_admin_mode do
+ let(:current_user) { create(:user, :admin) }
+
+ context 'with statuses argument filtering on successful builds' do
+ let(:args) { { statuses: [Types::Ci::JobStatusEnum.coerce_isolated_input('SUCCESS')] } }
+
+ it { is_expected.to eq 2 }
+ end
+
+ context 'with statuses argument filtering on failed builds' do
+ let(:args) { { statuses: [Types::Ci::JobStatusEnum.coerce_isolated_input('FAILED')] } }
+
+ it { is_expected.to eq 1 }
+ end
+
+ context 'without statuses argument' do
+ it { is_expected.to eq 3 }
+ end
+ end
+
+ context 'with unauthorized user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ private
+
+ def resolve_job_count(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: runner, args: args, ctx: context)&.value
+ end
+end
diff --git a/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
index 963a642fa4e..322bead0d3c 100644
--- a/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
@@ -9,17 +9,18 @@ RSpec.describe Resolvers::Ci::RunnerJobsResolver, feature_category: :runner_flee
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:irrelevant_pipeline) { create(:ci_pipeline, project: project) }
- let!(:build_one) { create(:ci_build, :success, name: 'Build One', runner: runner, pipeline: pipeline) }
- let!(:build_two) { create(:ci_build, :success, name: 'Build Two', runner: runner, pipeline: pipeline) }
- let!(:build_three) { create(:ci_build, :failed, name: 'Build Three', runner: runner, pipeline: pipeline) }
- let!(:irrelevant_build) { create(:ci_build, name: 'Irrelevant Build', pipeline: irrelevant_pipeline) }
+ let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
- let(:args) { {} }
- let(:runner) { create(:ci_runner, :project, projects: [project]) }
-
- subject { resolve_jobs(args) }
+ let_it_be(:build_one) { create(:ci_build, :success, name: 'Build One', runner: runner, pipeline: pipeline) }
+ let_it_be(:build_two) { create(:ci_build, :success, name: 'Build Two', runner: runner, pipeline: pipeline) }
+ let_it_be(:build_three) { create(:ci_build, :failed, name: 'Build Three', runner: runner, pipeline: pipeline) }
+ let_it_be(:irrelevant_build) { create(:ci_build, name: 'Irrelevant Build', pipeline: irrelevant_pipeline) }
describe '#resolve' do
+ subject(:jobs) { resolve_jobs(args) }
+
+ let(:args) { {} }
+
context 'with authorized user', :enable_admin_mode do
let(:current_user) { create(:user, :admin) }
diff --git a/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb b/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb
index a0d99f5f0c1..7fc600745df 100644
--- a/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb
+++ b/spec/graphql/types/commit_signatures/verification_status_enum_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe GitlabSchema.types['VerificationStatus'] do
.to match_array(%w[
UNVERIFIED UNVERIFIED_KEY VERIFIED
SAME_USER_DIFFERENT_EMAIL OTHER_USER UNKNOWN_KEY
- MULTIPLE_SIGNATURES REVOKED_KEY
+ MULTIPLE_SIGNATURES REVOKED_KEY VERIFIED_SYSTEM
])
end
end
diff --git a/spec/helpers/ci/jobs_helper_spec.rb b/spec/helpers/ci/jobs_helper_spec.rb
index a9ab4ab3b67..30cad66af04 100644
--- a/spec/helpers/ci/jobs_helper_spec.rb
+++ b/spec/helpers/ci/jobs_helper_spec.rb
@@ -6,14 +6,19 @@ RSpec.describe Ci::JobsHelper do
describe 'job helper functions' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:job) { create(:ci_build, project: project) }
+ let_it_be(:user) { create(:user) }
before do
helper.instance_variable_set(:@project, project)
helper.instance_variable_set(:@build, job)
+
+ allow(helper)
+ .to receive(:current_user)
+ .and_return(user)
end
it 'returns jobs data' do
- expect(helper.jobs_data).to include({
+ expect(helper.jobs_data(project, job)).to include({
"endpoint" => "/#{project.full_path}/-/jobs/#{job.id}.json",
"project_path" => project.full_path,
"artifact_help_url" => "/help/user/gitlab_com/index.md#gitlab-cicd",
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index e5f8918f7bb..db2536b7633 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -476,7 +476,7 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
let(:commit_id) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
it 'returns signature and signed text' do
- signature, signed_text = subject
+ signature, signed_text, signer = subject.values_at(:signature, :signed_text, :signer)
expected_signature = <<~SIGNATURE
-----BEGIN PGP SIGNATURE-----
@@ -509,6 +509,7 @@ RSpec.describe Gitlab::Git::Commit, feature_category: :source_code_management do
expect(signed_text).to eq(expected_signed_text)
expect(signed_text).to be_a_binary_string
+ expect(signer).to eq(:SIGNER_USER)
end
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index b08d11223f7..fd66efe12c8 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -1038,4 +1038,38 @@ RSpec.describe Gitlab::GitalyClient::CommitService, feature_category: :gitaly do
end
end
end
+
+ describe '#get_commit_signatures' do
+ let(:project) { create(:project, :test_repo) }
+
+ it 'returns commit signatures for specified commit ids', :aggregate_failures do
+ without_signature = "e63f41fe459e62e1228fcef60d7189127aeba95a" # has no signature
+
+ signed_by_user = [
+ "a17a9f66543673edf0a3d1c6b93bdda3fe600f32", # has signature
+ "7b5160f9bb23a3d58a0accdbe89da13b96b1ece9" # SSH signature
+ ]
+
+ large_signed_text = "8cf8e80a5a0546e391823c250f2b26b9cf15ce88" # has signature and commit message > 4MB
+
+ signatures = client.get_commit_signatures(
+ [without_signature, large_signed_text, *signed_by_user]
+ )
+
+ expect(signatures.keys).to match_array([large_signed_text, *signed_by_user])
+
+ [large_signed_text, *signed_by_user].each do |commit_id|
+ expect(signatures[commit_id][:signature]).to be_present
+ expect(signatures[commit_id][:signer]).to eq(:SIGNER_USER)
+ end
+
+ signed_by_user.each do |commit_id|
+ commit = project.commit(commit_id)
+ expect(signatures[commit_id][:signed_text]).to include(commit.message)
+ expect(signatures[commit_id][:signed_text]).to include(commit.description)
+ end
+
+ expect(signatures[large_signed_text][:signed_text].size).to eq(4971878)
+ end
+ end
end
diff --git a/spec/lib/gitlab/gpg/commit_spec.rb b/spec/lib/gitlab/gpg/commit_spec.rb
index 819a5633a78..6cd5cda69b8 100644
--- a/spec/lib/gitlab/gpg/commit_spec.rb
+++ b/spec/lib/gitlab/gpg/commit_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Gpg::Commit do
+RSpec.describe Gitlab::Gpg::Commit, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository, path: 'sample-project') }
let(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
@@ -12,15 +12,17 @@ RSpec.describe Gitlab::Gpg::Commit do
let(:user) { create(:user, email: user_email) }
let(:commit) { create(:commit, project: project, sha: commit_sha, committer_email: committer_email) }
let(:crypto) { instance_double(GPGME::Crypto) }
+ let(:signer) { :SIGNER_USER }
let(:mock_signature_data?) { true }
# gpg_keys must be pre-loaded so that they can be found during signature verification.
let!(:gpg_key) { create(:gpg_key, key: public_key, user: user) }
let(:signature_data) do
- [
- GpgHelpers::User1.signed_commit_signature,
- GpgHelpers::User1.signed_commit_base_data
- ]
+ {
+ signature: GpgHelpers::User1.signed_commit_signature,
+ signed_text: GpgHelpers::User1.signed_commit_base_data,
+ signer: signer
+ }
end
before do
@@ -55,11 +57,12 @@ RSpec.describe Gitlab::Gpg::Commit do
context 'invalid signature' do
let(:signature_data) do
- [
+ {
# Corrupt the key
- GpgHelpers::User1.signed_commit_signature.tr('=', 'a'),
- GpgHelpers::User1.signed_commit_base_data
- ]
+ signature: GpgHelpers::User1.signed_commit_signature.tr('=', 'a'),
+ signed_text: GpgHelpers::User1.signed_commit_base_data,
+ signer: signer
+ }
end
it 'returns nil' do
@@ -185,10 +188,11 @@ RSpec.describe Gitlab::Gpg::Commit do
end
let(:signature_data) do
- [
- GpgHelpers::User3.signed_commit_signature,
- GpgHelpers::User3.signed_commit_base_data
- ]
+ {
+ signature: GpgHelpers::User3.signed_commit_signature,
+ signed_text: GpgHelpers::User3.signed_commit_base_data,
+ signer: signer
+ }
end
it 'returns a valid signature' do
@@ -339,6 +343,25 @@ RSpec.describe Gitlab::Gpg::Commit do
expect(recorder.count).to eq(1)
end
end
+
+ context 'when signature created by GitLab' do
+ let(:signer) { :SIGNER_SYSTEM }
+ let(:gpg_key) { nil }
+
+ it 'returns a valid signature' do
+ expect(described_class.new(commit).signature).to have_attributes(
+ commit_sha: commit_sha,
+ project: project,
+ gpg_key: nil,
+ gpg_key_primary_keyid: GpgHelpers::User1.primary_keyid,
+ gpg_key_user_name: nil,
+ gpg_key_user_email: nil,
+ verification_status: 'verified_system'
+ )
+ end
+
+ it_behaves_like 'returns the cached signature on second call'
+ end
end
describe '#update_signature!' do
diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
index 5d444775e53..db88e99970c 100644
--- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
+++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb
@@ -4,7 +4,13 @@ require 'spec_helper'
RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
describe '#run' do
- let(:signature) { [GpgHelpers::User1.signed_commit_signature, GpgHelpers::User1.signed_commit_base_data] }
+ let(:signature) do
+ {
+ signature: GpgHelpers::User1.signed_commit_signature,
+ signed_text: GpgHelpers::User1.signed_commit_base_data
+ }
+ end
+
let(:committer_email) { GpgHelpers::User1.emails.first }
let!(:commit_sha) { '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' }
let!(:project) { create :project, :repository, path: 'sample-project' }
@@ -183,7 +189,13 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do
end
context 'gpg signature did not have an associated gpg subkey' do
- let(:signature) { [GpgHelpers::User3.signed_commit_signature, GpgHelpers::User3.signed_commit_base_data] }
+ let(:signature) do
+ {
+ signature: GpgHelpers::User3.signed_commit_signature,
+ signed_text: GpgHelpers::User3.signed_commit_base_data
+ }
+ end
+
let(:committer_email) { GpgHelpers::User3.emails.first }
let!(:user) { create :user, email: GpgHelpers::User3.emails.first }
diff --git a/spec/lib/gitlab/ssh/commit_spec.rb b/spec/lib/gitlab/ssh/commit_spec.rb
index 77f37857c82..3b53ed9d1db 100644
--- a/spec/lib/gitlab/ssh/commit_spec.rb
+++ b/spec/lib/gitlab/ssh/commit_spec.rb
@@ -9,7 +9,8 @@ RSpec.describe Gitlab::Ssh::Commit, feature_category: :source_code_management do
let(:commit) { create(:commit, project: project) }
let(:signature_text) { 'signature_text' }
let(:signed_text) { 'signed_text' }
- let(:signature_data) { [signature_text, signed_text] }
+ let(:signer) { :SIGNER_USER }
+ let(:signature_data) { { signature: signature_text, signed_text: signed_text, signer: signer } }
let(:verifier) { instance_double('Gitlab::Ssh::Signature') }
let(:verification_status) { :verified }
@@ -27,7 +28,7 @@ RSpec.describe Gitlab::Ssh::Commit, feature_category: :source_code_management do
})
allow(Gitlab::Ssh::Signature).to receive(:new)
- .with(signature_text, signed_text, commit.committer_email)
+ .with(signature_text, signed_text, signer, commit.committer_email)
.and_return(verifier)
end
diff --git a/spec/lib/gitlab/ssh/signature_spec.rb b/spec/lib/gitlab/ssh/signature_spec.rb
index ee9b38cae7d..cb0b1ff049c 100644
--- a/spec/lib/gitlab/ssh/signature_spec.rb
+++ b/spec/lib/gitlab/ssh/signature_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
let_it_be_with_reload(:key) { create(:key, usage_type: :signing, key: public_key_text, user: user) }
let(:signed_text) { 'This message was signed by an ssh key' }
+ let(:signer) { :SIGNER_USER }
let(:signature_text) do
# ssh-keygen -Y sign -n git -f id_test message.txt
@@ -27,6 +28,7 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
described_class.new(
signature_text,
signed_text,
+ signer,
committer_email
)
end
@@ -266,6 +268,15 @@ RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management
expect(signature.verification_status).to eq(:other_user)
end
end
+
+ context 'when signature created by GitLab' do
+ let(:signer) { :SIGNER_SYSTEM }
+
+ it 'reports verified_system status' do
+ expect(signature.verification_status).to eq(:verified_system)
+ expect(signature.key_fingerprint).to eq('dw7gPSvYtkCBU+BbTolbbckUEX3sL6NsGIJTQ4PYEnM')
+ end
+ end
end
describe '#key_fingerprint' do
diff --git a/spec/models/ci/catalog/resource_spec.rb b/spec/models/ci/catalog/resource_spec.rb
index 4c1ade5c308..45d49d65b02 100644
--- a/spec/models/ci/catalog/resource_spec.rb
+++ b/spec/models/ci/catalog/resource_spec.rb
@@ -22,6 +22,8 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
it { is_expected.to delegate_method(:star_count).to(:project) }
it { is_expected.to delegate_method(:forks_count).to(:project) }
+ it { is_expected.to define_enum_for(:state).with_values({ draft: 0, published: 1 }) }
+
describe '.for_projects' do
it 'returns catalog resources for the given project IDs' do
resources_for_projects = described_class.for_projects(project.id)
@@ -65,4 +67,10 @@ RSpec.describe Ci::Catalog::Resource, feature_category: :pipeline_composition do
expect(resource.latest_version).to eq(release3)
end
end
+
+ describe '#state' do
+ it 'defaults to draft' do
+ expect(resource.state).to eq('draft')
+ end
+ end
end
diff --git a/spec/requests/api/graphql/ci/runner_spec.rb b/spec/requests/api/graphql/ci/runner_spec.rb
index 63a657f3962..6acd705c982 100644
--- a/spec/requests/api/graphql/ci/runner_spec.rb
+++ b/spec/requests/api/graphql/ci/runner_spec.rb
@@ -272,12 +272,13 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
let_it_be(:build1) { create(:ci_build, :running, runner: active_project_runner, pipeline: pipeline1) }
let_it_be(:build2) { create(:ci_build, :running, runner: active_project_runner, pipeline: pipeline2) }
- let(:runner_query_fragment) { 'id jobCount' }
let(:query) do
%(
query {
- runner1: runner(id: "#{active_project_runner.to_global_id}") { #{runner_query_fragment} }
- runner2: runner(id: "#{inactive_instance_runner.to_global_id}") { #{runner_query_fragment} }
+ runner1: runner(id: "#{active_project_runner.to_global_id}") { id jobCount(statuses: [RUNNING]) }
+ runner2: runner(id: "#{active_project_runner.to_global_id}") { id jobCount(statuses: FAILED) }
+ runner3: runner(id: "#{active_project_runner.to_global_id}") { id jobCount }
+ runner4: runner(id: "#{inactive_instance_runner.to_global_id}") { id jobCount }
}
)
end
@@ -287,7 +288,9 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
expect(graphql_data).to match a_hash_including(
'runner1' => a_graphql_entity_for(active_project_runner, job_count: 2),
- 'runner2' => a_graphql_entity_for(inactive_instance_runner, job_count: 0)
+ 'runner2' => a_graphql_entity_for(active_project_runner, job_count: 0),
+ 'runner3' => a_graphql_entity_for(active_project_runner, job_count: 2),
+ 'runner4' => a_graphql_entity_for(inactive_instance_runner, job_count: 0)
)
end
@@ -301,7 +304,9 @@ RSpec.describe 'Query.runner(id)', feature_category: :runner_fleet do
expect(graphql_data).to match a_hash_including(
'runner1' => a_graphql_entity_for(active_project_runner, job_count: 1),
- 'runner2' => a_graphql_entity_for(inactive_instance_runner, job_count: 0)
+ 'runner2' => a_graphql_entity_for(active_project_runner, job_count: 0),
+ 'runner3' => a_graphql_entity_for(active_project_runner, job_count: 1),
+ 'runner4' => a_graphql_entity_for(inactive_instance_runner, job_count: 0)
)
end
end
diff --git a/spec/requests/projects/ml/models_controller_spec.rb b/spec/requests/projects/ml/models_controller_spec.rb
index e437812821a..d03748c8dff 100644
--- a/spec/requests/projects/ml/models_controller_spec.rb
+++ b/spec/requests/projects/ml/models_controller_spec.rb
@@ -26,8 +26,7 @@ RSpec.describe Projects::Ml::ModelsController, feature_category: :mlops do
response
end
- # TODO: being removed with https://gitlab.com/gitlab-org/gitlab/-/merge_requests/124833
- xit 'renders the template' do
+ it 'renders the template' do
expect(index_request).to render_template('projects/ml/models/index')
end
diff --git a/spec/support/shared_contexts/user_contribution_events_shared_context.rb b/spec/support/shared_contexts/user_contribution_events_shared_context.rb
index 681c2f0d811..48f0ac1e4ac 100644
--- a/spec/support/shared_contexts/user_contribution_events_shared_context.rb
+++ b/spec/support/shared_contexts/user_contribution_events_shared_context.rb
@@ -95,27 +95,52 @@ RSpec.shared_context 'with user contribution events' do
end
# pushed
- let_it_be(:push_event_payload_pushed) do
+ commit_title = 'Initial commit'
+ let_it_be(:push_event_branch_payload_pushed) do
event = create(:push_event, project: project, author: user)
- create(:push_event_payload, event: event)
+ create(:push_event_payload, event: event, commit_title: commit_title)
event
end
- let_it_be(:push_event_payload_created) do
+ let_it_be(:push_event_branch_payload_created) do
event = create(:push_event, project: project, author: user)
- create(:push_event_payload, event: event, action: :created)
+ create(:push_event_payload, event: event, action: :created, commit_title: commit_title)
event
end
- let_it_be(:push_event_payload_removed) do
+ let_it_be(:push_event_branch_payload_removed) do
event = create(:push_event, project: project, author: user)
create(:push_event_payload, event: event, action: :removed)
event
end
+ let_it_be(:push_event_tag_payload_pushed) do
+ event = create(:push_event, project: project, author: user)
+ create(:push_event_payload, event: event, ref_type: :tag, commit_title: commit_title)
+ event
+ end
+
+ let_it_be(:push_event_tag_payload_created) do
+ event = create(:push_event, project: project, author: user)
+ create(:push_event_payload, event: event, ref_type: :tag, action: :created, commit_title: commit_title)
+ event
+ end
+
+ let_it_be(:push_event_tag_payload_removed) do
+ event = create(:push_event, project: project, author: user)
+ create(:push_event_payload, event: event, ref_type: :tag, action: :removed)
+ event
+ end
+
let_it_be(:bulk_push_event) do
event = create(:push_event, project: project, author: user)
- create(:push_event_payload, event: event, commit_count: 5, commit_from: '83c6aa31482b9076531ed3a880e75627fd6b335c')
+ create(
+ :push_event_payload,
+ event: event,
+ commit_count: 5,
+ commit_from: '83c6aa31482b9076531ed3a880e75627fd6b335c',
+ commit_title: commit_title
+ )
event
end
diff --git a/spec/views/projects/commit/show.html.haml_spec.rb b/spec/views/projects/commit/show.html.haml_spec.rb
index 6d2237e773e..4cfff00d390 100644
--- a/spec/views/projects/commit/show.html.haml_spec.rb
+++ b/spec/views/projects/commit/show.html.haml_spec.rb
@@ -71,14 +71,12 @@ RSpec.describe 'projects/commit/show.html.haml', feature_category: :source_code_
let(:title) { badge_attributes['data-title'].value }
let(:content) { badge_attributes['data-content'].value }
- before do
- render
- end
-
context 'with GPG' do
let(:commit) { project.commit(GpgHelpers::SIGNED_COMMIT_SHA) }
it 'renders unverified badge' do
+ render
+
expect(title).to include('This commit was signed with an unverified signature.')
expect(content).to include(commit.signature.gpg_key_primary_keyid)
end
@@ -88,15 +86,34 @@ RSpec.describe 'projects/commit/show.html.haml', feature_category: :source_code_
let(:commit) { project.commit('7b5160f9bb23a3d58a0accdbe89da13b96b1ece9') }
it 'renders unverified badge' do
+ render
+
expect(title).to include('This commit was signed with an unverified signature.')
expect(content).to match(/SSH key fingerprint:[\s\S].+#{commit.signature.key_fingerprint_sha256}/)
end
+
+ context 'when the commit has been signed by GitLab' do
+ it 'renders verified badge' do
+ allow_next_instance_of(Gitlab::Ssh::Commit) do |instance|
+ allow(instance).to receive(:signer).and_return(:SIGNER_SYSTEM)
+ end
+
+ render
+
+ expect(content).to match(/SSH key fingerprint:[\s\S].+#{commit.signature.key_fingerprint_sha256}/)
+ expect(title).to include(
+ 'This commit was created in the GitLab UI, and signed with a GitLab-verified signature.'
+ )
+ end
+ end
end
context 'with X.509' do
let(:commit) { project.commit('189a6c924013fc3fe40d6f1ec1dc20214183bc97') }
it 'renders unverified badge' do
+ render
+
expect(title).to include('This commit was signed with an unverified signature.')
expect(content).to include(commit.signature.x509_certificate.subject_key_identifier.tr(":", " "))
end