Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-02-08 18:07:53 +00:00
parent c6c5dd8848
commit a3487798ae
65 changed files with 707 additions and 664 deletions

View File

@ -1,5 +1,6 @@
stages:
- sync
- preflight
- prepare
- build-images
- fixtures

View File

@ -395,29 +395,3 @@
before_script:
- export KUBE_CONTEXT="gitlab-org/gitlab:review-apps"
- kubectl config use-context ${KUBE_CONTEXT}
.fast-no-clone-job:
variables:
GIT_STRATEGY: none # We will download the required files for the job from the API
before_script:
# Logic taken from scripts/utils.sh in download_files function
- |
if [[ "${CI_PROJECT_VISIBILITY}" == "public" ]]; then
url="${CI_PROJECT_URL}/raw/${CI_COMMIT_SHA}"
else
url="https://gitlab.com/gitlab-org/gitlab/raw/master"
echo -e "\033[1;31m ************************************ \033[0m"
echo -e "\033[1;31m ************* WARNING! ************* \033[0m"
echo -e "\033[1;31m ************************************ \033[0m"
echo -e "\033[1;31m The following files will be downloaded from gitlab-org/gitlab's master branch: \033[0m"
echo -e "\033[1;31m \t scripts/utils.sh \033[0m"
for file in "${FILES_TO_DOWNLOAD}"; do
echo -e "\033[1;31m \t $file \033[0m"
done
fi
curl "${url}/scripts/utils.sh" --create-dirs --output scripts/utils.sh
- source scripts/utils.sh
- download_files ${FILES_TO_DOWNLOAD}

View File

@ -0,0 +1,14 @@
# Preflight smoke check: boots the Rails app in the production environment
# (production bundle groups only) so production-only initializer or
# configuration errors surface before the rest of the pipeline runs.
rails-production-environment:
  extends:
    - .default-before_script
    - .production
    - .ruby-cache
    - .setup:rules:rails-production-environment
    - .use-pg12
  stage: preflight
  variables:
    # Install only the gems required for a production boot.
    BUNDLE_WITHOUT: "development:test"
    BUNDLE_WITH: "production"
  needs: []  # No upstream jobs; can start immediately.
  script:
    # Booting `rails runner` is sufficient to load all production initializers.
    - bundle exec rails runner --environment=production 'puts Rails.env'

View File

@ -30,7 +30,6 @@ review-build-cng-env:
extends:
- .default-retry
- .review:rules:review-build-cng
- .fast-no-clone-job
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}-alpine3.16
stage: prepare
needs:
@ -39,10 +38,8 @@ review-build-cng-env:
job: build-assets-image
variables:
BUILD_ENV: build.env
FILES_TO_DOWNLOAD: scripts/trigger-build.rb
before_script:
- apk add --no-cache --update curl # Not present in ruby-alpine, so we add it manually
- !reference [".fast-no-clone-job", before_script]
- source ./scripts/utils.sh
- install_gitlab_gem
script:
- 'ruby -r./scripts/trigger-build.rb -e "puts Trigger.variables_for_env_file(Trigger::CNG.new.variables)" > $BUILD_ENV'
@ -108,7 +105,6 @@ review-deploy:
extends:
- .review-workflow-base
- .review:rules:review-deploy
- .fast-no-clone-job
stage: deploy
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}dtzar/helm-kubectl:3.9.3
needs:
@ -120,17 +116,7 @@ review-deploy:
- "gitlab-${GITLAB_HELM_CHART_REF}"
environment:
action: start
variables:
# We use > instead of | because we want the files to be space-separated.
FILES_TO_DOWNLOAD: >
scripts/review_apps/review-apps.sh
scripts/review_apps/seed-dast-test-data.sh
GITLAB_SHELL_VERSION
GITALY_SERVER_VERSION
GITLAB_WORKHORSE_VERSION
before_script:
- apk add --no-cache --update curl # Not present in ruby-alpine, so we add it manually
- !reference [".fast-no-clone-job", before_script]
- export GITLAB_SHELL_VERSION=$(<GITLAB_SHELL_VERSION)
- export GITALY_VERSION=$(<GITALY_SERVER_VERSION)
- export GITLAB_WORKHORSE_VERSION=$(<GITLAB_WORKHORSE_VERSION)

View File

@ -2155,6 +2155,11 @@
- <<: *if-default-refs
changes: *code-backstage-patterns
.setup:rules:rails-production-environment:
rules:
- <<: *if-default-refs
changes: *code-patterns
.setup:rules:no-ee-check:
rules:
- <<: *if-not-foss

View File

@ -183,9 +183,9 @@ gem 'seed-fu', '~> 2.3.7'
gem 'elasticsearch-model', '~> 7.2'
gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentation'
gem 'elasticsearch-api', '7.13.3'
gem 'aws-sdk-core', '~> 3.169.0'
gem 'aws-sdk-core', '~> 3.170.0'
gem 'aws-sdk-cloudformation', '~> 1'
gem 'aws-sdk-s3', '~> 1.118.0'
gem 'aws-sdk-s3', '~> 1.119.0'
gem 'faraday_middleware-aws-sigv4', '~>0.3.0'
gem 'typhoeus', '~> 1.4.0' # Used with Elasticsearch to support http keep-alive connections

View File

@ -35,11 +35,11 @@
{"name":"awesome_print","version":"1.9.2","platform":"ruby","checksum":"e99b32b704acff16d768b3468680793ced40bfdc4537eb07e06a4be11133786e"},
{"name":"awrence","version":"1.1.1","platform":"ruby","checksum":"9be584c97408ed92d5e1ca11740853646fe270de675f2f8dd44e8233226dfc97"},
{"name":"aws-eventstream","version":"1.2.0","platform":"ruby","checksum":"ffa53482c92880b001ff2fb06919b9bb82fd847cbb0fa244985d2ebb6dd0d1df"},
{"name":"aws-partitions","version":"1.695.0","platform":"ruby","checksum":"f48ded613316522f5e44905af9c029dc62b43b7acaa917d9322831029ed45174"},
{"name":"aws-partitions","version":"1.703.0","platform":"ruby","checksum":"3d32fcdcb2799fe0472a9b30990035713d7a75ac8b77bd7767ef5ee2914ea748"},
{"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
{"name":"aws-sdk-core","version":"3.169.0","platform":"ruby","checksum":"81e75e70dfd4a17c55554b593ad2de5534b6cab9197cd8e278eb8b0a9ad4051b"},
{"name":"aws-sdk-core","version":"3.170.0","platform":"ruby","checksum":"59341e5cf39d70c4069201bd46f914efd2a0aaa257c5c4d02dfa602e36fc847d"},
{"name":"aws-sdk-kms","version":"1.62.0","platform":"ruby","checksum":"b9111c698d783f3f092dcc6a8b9b7e3f53f00e6e501bdc5a4409afdcaf411a1c"},
{"name":"aws-sdk-s3","version":"1.118.0","platform":"ruby","checksum":"85358b4e56a4b56bba0b8995127f1f6123929d9eb7007534925b359a4a0433db"},
{"name":"aws-sdk-s3","version":"1.119.0","platform":"ruby","checksum":"2c34d9162df20ea7a3a736350785a57320caf0ab04896c72f6cd1ba9a508e2e9"},
{"name":"aws-sigv4","version":"1.5.1","platform":"ruby","checksum":"d68c87fff4ee843b4b92b23c7f31f957f254ec6eb064181f7119124aab8b8bb4"},
{"name":"azure-storage-blob","version":"2.0.3","platform":"ruby","checksum":"61b76118843c91776bd24bee22c74adafeb7c4bb3a858a325047dae3b59d0363"},
{"name":"azure-storage-common","version":"2.0.4","platform":"ruby","checksum":"608f4daab0e06b583b73dcffd3246ea39e78056de31630286b0cf97af7d6956b"},

View File

@ -200,11 +200,11 @@ GEM
awesome_print (1.9.2)
awrence (1.1.1)
aws-eventstream (1.2.0)
aws-partitions (1.695.0)
aws-partitions (1.703.0)
aws-sdk-cloudformation (1.41.0)
aws-sdk-core (~> 3, >= 3.99.0)
aws-sigv4 (~> 1.1)
aws-sdk-core (3.169.0)
aws-sdk-core (3.170.0)
aws-eventstream (~> 1, >= 1.0.2)
aws-partitions (~> 1, >= 1.651.0)
aws-sigv4 (~> 1.5)
@ -212,7 +212,7 @@ GEM
aws-sdk-kms (1.62.0)
aws-sdk-core (~> 3, >= 3.165.0)
aws-sigv4 (~> 1.1)
aws-sdk-s3 (1.118.0)
aws-sdk-s3 (1.119.0)
aws-sdk-core (~> 3, >= 3.165.0)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.4)
@ -1610,8 +1610,8 @@ DEPENDENCIES
autoprefixer-rails (= 10.2.5.1)
awesome_print
aws-sdk-cloudformation (~> 1)
aws-sdk-core (~> 3.169.0)
aws-sdk-s3 (~> 1.118.0)
aws-sdk-core (~> 3.170.0)
aws-sdk-s3 (~> 1.119.0)
babosa (~> 1.0.4)
base32 (~> 0.3.0)
batch-loader (~> 2.0.1)
@ -1897,4 +1897,4 @@ DEPENDENCIES
yajl-ruby (~> 1.4.3)
BUNDLED WITH
2.4.4
2.4.6

View File

@ -43,10 +43,10 @@ export default {
*/
getFormDataAsObject() {
const assigneeIds = this.form.find('input[name="update[assignee_ids][]"]').val();
const formData = {
update: {
state_event: this.form.find('input[name="update[state_event]"]').val(),
assignee_ids: [this.form.find('input[name="update[assignee_ids][]"]').val()],
milestone_id: this.form.find('input[name="update[milestone_id]"]').val(),
issuable_ids: this.form.find('input[name="update[issuable_ids]"]').val(),
subscription_event: this.form.find('input[name="update[subscription_event]"]').val(),
@ -57,6 +57,9 @@ export default {
remove_label_ids: [],
},
};
if (assigneeIds) {
formData.update.assignee_ids = [assigneeIds];
}
if (this.willUpdateLabels) {
formData.update.add_label_ids = this.$labelDropdown.data('user-checked');
formData.update.remove_label_ids = this.$labelDropdown.data('user-unchecked');

View File

@ -1,25 +0,0 @@
import axios from './axios_utils';
import { normalizeHeaders, parseIntPagination } from './common_utils';
// This is used in the select2 config to replace jQuery.ajax with axios
// Adapter used in the select2 config to replace jQuery.ajax with axios.
// `params` follows the select2 transport contract: { type, url, data,
// success, error }. Pagination headers are translated into select2's
// `pagination.more` flag.
export const select2AxiosTransport = (params) => {
  const method = params.type ? params.type.toLowerCase() : 'get';

  axios({ method, url: params.url, params: params.data })
    .then(({ data, headers }) => {
      const pagination = parseIntPagination(normalizeHeaders(headers));

      params.success({
        results: data || [],
        pagination: {
          // More pages exist when the next-page header exceeds the current page.
          more: pagination.nextPage > pagination.page,
        },
      });
    })
    .catch(params.error);
};

View File

@ -4,7 +4,7 @@ const viewers = {
image: () => import('./image_viewer.vue'),
video: () => import('./video_viewer.vue'),
empty: () => import('./empty_viewer.vue'),
text: () => import('~/vue_shared/components/source_viewer/source_viewer.vue'),
text: () => import('~/vue_shared/components/source_viewer/source_viewer_deprecated.vue'),
pdf: () => import('./pdf_viewer.vue'),
lfs: () => import('./lfs_viewer.vue'),
audio: () => import('./audio_viewer.vue'),

View File

@ -13,7 +13,7 @@ import {
LINES_PER_CHUNK,
LEGACY_FALLBACKS,
} from './constants';
import Chunk from './components/chunk.vue';
import Chunk from './components/chunk_deprecated.vue';
import { registerPlugins } from './plugins/index';
/*

View File

@ -12,6 +12,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
layout 'project'
before_action :authorize_read_build!
before_action :authorize_read_build_trace!, only: [:download]
before_action :authorize_read_job_artifacts!, only: [:download]
before_action :authorize_update_build!, only: [:keep]
before_action :authorize_destroy_artifacts!, only: [:destroy]
before_action :extract_ref_name_and_path
@ -41,10 +42,10 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
def download
return render_404 unless artifacts_file
return render_404 unless artifact_file
log_artifacts_filesize(artifacts_file.model)
send_upload(artifacts_file, attachment: artifacts_file.filename, proxy: params[:proxy])
log_artifacts_filesize(artifact_file.model)
send_upload(artifact_file, attachment: artifact_file.filename, proxy: params[:proxy])
end
def browse
@ -83,11 +84,11 @@ class Projects::ArtifactsController < Projects::ApplicationController
def raw
return render_404 unless zip_artifact?
return render_404 unless artifacts_file
return render_404 unless artifact_file
path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
send_artifacts_entry(artifacts_file, path)
send_artifacts_entry(artifact_file, path)
end
def keep
@ -154,8 +155,12 @@ class Projects::ArtifactsController < Projects::ApplicationController
project.latest_successful_build_for_ref(params[:job], @ref_name)
end
def artifacts_file
@artifacts_file ||= build&.artifacts_file_for_type(params[:file_type] || :archive)
def job_artifact
@job_artifact ||= build&.artifact_for_type(params[:file_type] || :archive)
end
def artifact_file
@artifact_file ||= job_artifact&.file
end
def zip_artifact?
@ -176,4 +181,8 @@ class Projects::ArtifactsController < Projects::ApplicationController
super
end
def authorize_read_job_artifacts!
return access_denied! unless can?(current_user, :read_job_artifacts, job_artifact)
end
end

View File

@ -868,10 +868,10 @@ module Ci
job_artifacts.update_all(expire_at: nil)
end
def artifacts_file_for_type(type)
def artifact_for_type(type)
file_types = Ci::JobArtifact.associated_file_types_for(type)
file_types_ids = file_types&.map { |file_type| Ci::JobArtifact.file_types[file_type] }
job_artifacts.find_by(file_type: file_types_ids)&.file
job_artifacts.find_by(file_type: file_types_ids)
end
def steps
@ -1288,6 +1288,19 @@ module Ci
def track_ci_secrets_management_id_tokens_usage
::Gitlab::UsageDataCounters::HLLRedisCounter.track_event('i_ci_secrets_management_id_tokens_build_created', values: user_id)
Gitlab::Tracking.event(
self.class.to_s,
'create_id_tokens',
namespace: namespace,
user: user,
label: 'redis_hll_counters.ci_secrets_management.i_ci_secrets_management_id_tokens_build_created_monthly',
ultimate_namespace_id: namespace.root_ancestor.id,
context: [Gitlab::Tracking::ServicePingContext.new(
data_source: :redis_hll,
event: 'i_ci_secrets_management_id_tokens_build_created'
).to_context]
)
end
end
end

View File

@ -6,13 +6,13 @@ module Onboarding
include Gitlab::Experiment::Dsl
ACTION_ISSUE_IDS = {
pipeline_created: 7,
trial_started: 2,
required_mr_approvals_enabled: 11,
code_owners_enabled: 10
}.freeze
ACTION_PATHS = [
:pipeline_created,
:issue_created,
:git_write,
:merge_request_created,

View File

@ -17,10 +17,8 @@ module Ci
return if pipeline.parent_pipeline? # skip if child pipeline
return unless project.auto_cancel_pending_pipelines?
pipelines = move_service_to_async? ? parent_and_child_pipelines : all_auto_cancelable_pipelines
Gitlab::OptimisticLocking
.retry_lock(pipelines, name: 'cancel_pending_pipelines') do |cancelables|
.retry_lock(parent_and_child_pipelines, name: 'cancel_pending_pipelines') do |cancelables|
cancelables.select(:id).each_batch(of: BATCH_SIZE) do |cancelables_batch|
auto_cancel_interruptible_pipelines(cancelables_batch.ids)
end
@ -31,15 +29,11 @@ module Ci
attr_reader :pipeline, :project
def all_auto_cancelable_pipelines
same_ref_pipelines
.id_not_in(pipeline.id)
.ci_and_parent_sources
.alive_or_scheduled
end
def parent_auto_cancelable_pipelines
same_ref_pipelines
project.all_pipelines
.created_after(1.week.ago)
.for_ref(pipeline.ref)
.where_not_sha(project.commit(pipeline.ref).try(:id))
.where("created_at < ?", pipeline.created_at)
.ci_sources
end
@ -71,18 +65,6 @@ module Ci
)
end
end
def same_ref_pipelines
project.all_pipelines
.created_after(1.week.ago)
.for_ref(pipeline.ref)
.where_not_sha(project.commit(pipeline.ref).try(:id))
end
def move_service_to_async?
Feature.enabled?(:move_cancel_pending_pipelines_to_async, project)
end
strong_memoize_attr :move_service_to_async?
end
end
end

View File

@ -98,6 +98,7 @@ module MergeRequests
commit_id
ensure
merge_request.update_and_mark_in_progress_merge_commit_sha(nil)
log_info("Merge request marked in progress")
end
def update_merge_sha_metadata(commit_id)

View File

@ -0,0 +1,26 @@
---
description: Pipeline created with id_tokens (JWT tokens) defined
category: Ci::Build
action: create_id_tokens
label_description: "Mirrored Service Ping key_path: redis_hll_counters.ci_secrets_management.i_ci_secrets_management_id_tokens_build_created_monthly"
property_description:
value_description:
extra_properties:
ultimate_namespace_id: "The ID of the Ultimate Namespace"
identifiers:
- user
- namespace
product_section: ops
product_stage: verify
product_group: pipeline_authoring
product_category: secrets_management
milestone: "15.9"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/110422
distributions:
- ce
- ee
tiers:
- free
- premium
- ultimate

View File

@ -1,8 +0,0 @@
---
name: move_cancel_pending_pipelines_to_async
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/106347
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/385099
milestone: '15.9'
type: development
group: group::pipeline execution
default_enabled: false

View File

@ -0,0 +1,20 @@
- title: "File Type variable expansion in `.gitlab-ci.yml`" # (required) The name of the feature to be deprecated
announcement_milestone: "15.5" # (required) The milestone when this feature was first announced as deprecated.
removal_milestone: "15.7" # (required) The milestone when this feature is planned to be removed
removal_date: "2022-12-22" # (required) This should almost always be the 22nd of a month (YYYY-MM-DD), the date of the milestone release when this feature will be removed.
breaking_change: true # (required) If this deprecation is a breaking change, set this value to true
reporter: DarrenEastman # (required) GitLab username of the person reporting the deprecation
stage: Verify # (required) String value of the stage that the feature was created in. e.g., Growth
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/29407 # (required) Link to the deprecation issue in GitLab
body: | # (required) Do not modify this line, instead modify the lines below.
Prior to this change, variables that referenced or applied alias file variables expanded to the value of the `File` type variable (for example, the file contents). This behavior was incorrect because it did not comply with typical shell variable expansion rules. A user could run an `echo` command with the variable as an input parameter to leak secrets or sensitive information stored in `File` type variables.
In 15.7, we are removing file type variable expansion from GitLab. It is essential to check your CI pipelines to confirm whether your scripts reference a file variable. If your CI job relies on the previous expansion functionality, that CI job does not work and generates an error as of 15.7. The new behavior is that variable expansions that reference or apply alias file variables expand to the file name or path of the `File` type variable, instead of its value, such as the file contents.
# OPTIONAL FIELDS
#
tiers: # (optional - may be required in the future) An array of tiers that the feature is available in currently. e.g., [Free, Silver, Gold, Core, Premium, Ultimate]
documentation_url: # (optional) This is a link to the current documentation page
image_url: # (optional) This is a link to a thumbnail image depicting the feature
video_url: # (optional) Use the youtube thumbnail URL with the structure of https://img.youtube.com/vi/UNIQUEID/hqdefault.jpg

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true

# Adds `application_settings.git_rate_limit_users_alertlist`: an integer
# array column (presumably user IDs to alert on Git rate-limit breaches —
# confirm against the feature MR) defaulting to an empty array, NOT NULL.
class AddGitRateLimitUsersAlertlistToApplicationSettings < Gitlab::Database::Migration[2.1]
  def change
    add_column :application_settings, :git_rate_limit_users_alertlist,
      :integer,
      array: true,   # stored as integer[]
      default: [],
      null: false
  end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
class AddUniqueProjectDownloadLimitAlertlistToNamespaceSettings < Gitlab::Database::Migration[2.1]
enable_lock_retries!
def change
add_column :namespace_settings, :unique_project_download_limit_alertlist,
:integer,
array: true,
default: [],
null: false
end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true
class RebalancePartitionIdCiPipeline < Gitlab::Database::Migration[2.1]
MIGRATION = 'RebalancePartitionId'
DELAY_INTERVAL = 2.minutes
TABLE = :ci_pipelines
BATCH_SIZE = 2_000
SUB_BATCH_SIZE = 200
restrict_gitlab_migration gitlab_schema: :gitlab_ci
def up
return unless Gitlab.com?
queue_batched_background_migration(
MIGRATION,
TABLE,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
return unless Gitlab.com?
delete_batched_background_migration(MIGRATION, TABLE, :id, [])
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
class AddApplicationSettingsGitUsersAlertlistMaxUsernamesConstraint < Gitlab::Database::Migration[2.1]
CONSTRAINT_NAME = 'app_settings_git_rate_limit_users_alertlist_max_usernames'
disable_ddl_transaction!
def up
add_check_constraint :application_settings, 'CARDINALITY(git_rate_limit_users_alertlist) <= 100', CONSTRAINT_NAME
end
def down
remove_check_constraint :application_settings, CONSTRAINT_NAME
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddNamespaceSettingsUniqueProjectDownloadLimitAlertlistSizeConstraint < Gitlab::Database::Migration[2.1]
CONSTRAINT_NAME = 'namespace_settings_unique_project_download_limit_alertlist_size'
disable_ddl_transaction!
def up
add_check_constraint :namespace_settings,
'CARDINALITY(unique_project_download_limit_alertlist) <= 100',
CONSTRAINT_NAME
end
def down
remove_check_constraint :namespace_settings, CONSTRAINT_NAME
end
end

View File

@ -0,0 +1 @@
364f785b564d92d2956c5ffea71091561231888ffa6f4cd9125fc8ebf9150f77

View File

@ -0,0 +1 @@
a95107b71a3e9c6a07b2e50ad18c37b93d0ec3ebcda3cbb8075603f1fb91c555

View File

@ -0,0 +1 @@
08d2be57e3b7293bc6179e2265a7a627cb3b2327f6027039e9c3af9fa746425c

View File

@ -0,0 +1 @@
bf4c2b95c008f100045bad2b2c31ce03564f5b3299711ad7a423febd49117f52

View File

@ -0,0 +1 @@
c5d7812734f7561736dafb427fe5276c248aad40a28304e6a785660bec2f54e2

View File

@ -11639,10 +11639,12 @@ CREATE TABLE application_settings (
search_max_docs_denominator integer DEFAULT 5000000 NOT NULL,
search_min_docs_before_rollover integer DEFAULT 100000 NOT NULL,
deactivation_email_additional_text text,
git_rate_limit_users_alertlist integer[] DEFAULT '{}'::integer[] NOT NULL,
CONSTRAINT app_settings_container_reg_cleanup_tags_max_list_size_positive CHECK ((container_registry_cleanup_tags_service_max_list_size >= 0)),
CONSTRAINT app_settings_container_registry_pre_import_tags_rate_positive CHECK ((container_registry_pre_import_tags_rate >= (0)::numeric)),
CONSTRAINT app_settings_dep_proxy_ttl_policies_worker_capacity_positive CHECK ((dependency_proxy_ttl_group_policy_worker_capacity >= 0)),
CONSTRAINT app_settings_ext_pipeline_validation_service_url_text_limit CHECK ((char_length(external_pipeline_validation_service_url) <= 255)),
CONSTRAINT app_settings_git_rate_limit_users_alertlist_max_usernames CHECK ((cardinality(git_rate_limit_users_alertlist) <= 100)),
CONSTRAINT app_settings_git_rate_limit_users_allowlist_max_usernames CHECK ((cardinality(git_rate_limit_users_allowlist) <= 100)),
CONSTRAINT app_settings_max_pages_custom_domains_per_project_check CHECK ((max_pages_custom_domains_per_project >= 0)),
CONSTRAINT app_settings_max_terraform_state_size_bytes_check CHECK ((max_terraform_state_size_bytes >= 0)),
@ -18353,7 +18355,9 @@ CREATE TABLE namespace_settings (
default_compliance_framework_id bigint,
runner_registration_enabled boolean DEFAULT true,
allow_runner_registration_token boolean DEFAULT true NOT NULL,
unique_project_download_limit_alertlist integer[] DEFAULT '{}'::integer[] NOT NULL,
CONSTRAINT check_0ba93c78c7 CHECK ((char_length(default_branch_name) <= 255)),
CONSTRAINT namespace_settings_unique_project_download_limit_alertlist_size CHECK ((cardinality(unique_project_download_limit_alertlist) <= 100)),
CONSTRAINT namespace_settings_unique_project_download_limit_allowlist_size CHECK ((cardinality(unique_project_download_limit_allowlist) <= 100))
);

View File

@ -81,7 +81,7 @@ From left to right, the performance bar displays:
NOTE:
Not all indicators are available in all environments. For instance, the memory view
requires running Ruby with [specific patches](https://gitlab.com/gitlab-org/gitlab-build-images/-/blob/master/patches/ruby/2.7.4/thread-memory-allocations-2.7.patch)
applied. When running GitLab locally using [GDK](../../../development/contributing/index.md#gitlab-development-kit),
applied. When running GitLab locally using the [GDK](https://gitlab.com/gitlab-org/gitlab-development-kit),
this is typically not the case and the memory view cannot be used.
## Keyboard shortcut

View File

@ -93,3 +93,21 @@ GET /projects/:id/merge_requests/:merge_request_iid/draft_notes/:draft_note_id
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/14/merge_requests/11/draft_notes/5"
```
## Delete a draft note
Deletes an existing draft note for a given merge request.
```plaintext
DELETE /projects/:id/merge_requests/:merge_request_iid/draft_notes/:draft_note_id
```
| Attribute | Type | Required | Description |
| ------------------- | ---------------- | ----------- | --------------------- |
| `draft_note_id`     | integer           | yes      | The ID of a draft note. |
| `id`                | integer or string | yes      | The ID or [URL-encoded path of the project](rest/index.md#namespaced-path-encoding). |
| `merge_request_iid` | integer           | yes      | The IID of a project merge request. |
```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/14/merge_requests/11/draft_notes/5"
```

View File

@ -993,6 +993,7 @@ PUT /groups/:id
| `unique_project_download_limit` **(ULTIMATE)** | integer | no | Maximum number of unique projects a user can download in the specified time period before they are banned. Available only on top-level groups. Default: 0, Maximum: 10,000. |
| `unique_project_download_limit_interval_in_seconds` **(ULTIMATE)** | integer | no | Time period during which a user can download a maximum amount of projects before they are banned. Available only on top-level groups. Default: 0, Maximum: 864,000 seconds (10 days). |
| `unique_project_download_limit_allowlist` **(ULTIMATE)** | array of strings | no | List of usernames excluded from the unique project download limit. Available only on top-level groups. Default: `[]`, Maximum: 100 usernames. |
| `unique_project_download_limit_alertlist` **(ULTIMATE)** | array of integers | no | List of user IDs that are emailed when the unique project download limit is exceeded. Available only on top-level groups. Default: `[]`, Maximum: 100 user IDs. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/110201) in GitLab 15.9. |
| `auto_ban_user_on_excessive_projects_download` **(ULTIMATE)** | boolean | no | When enabled, users are automatically banned from the group when they download more than the maximum number of unique projects specified by `unique_project_download_limit` and `unique_project_download_limit_interval_in_seconds`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94159) in GitLab 15.4. |
| `ip_restriction_ranges` **(PREMIUM)** | string | no | Comma-separated list of IP addresses or subnet masks to restrict group access. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/351493) in GitLab 15.4. |

View File

@ -408,6 +408,7 @@ listed in the descriptions of the relevant settings.
| `max_number_of_repository_downloads` **(ULTIMATE SELF)** | integer | no | Maximum number of unique repositories a user can download in the specified time period before they are banned. Default: 0, Maximum: 10,000 repositories. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87980) in GitLab 15.1. |
| `max_number_of_repository_downloads_within_time_period` **(ULTIMATE SELF)** | integer | no | Reporting time period (in seconds). Default: 0, Maximum: 864000 seconds (10 days). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87980) in GitLab 15.1. |
| `git_rate_limit_users_allowlist` **(ULTIMATE SELF)** | array of strings | no | List of usernames excluded from Git anti-abuse rate limits. Default: `[]`, Maximum: 100 usernames. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90815) in GitLab 15.2. |
| `git_rate_limit_users_alertlist` **(ULTIMATE SELF)** | array of integers | no | List of user IDs that are emailed when the Git abuse rate limit is exceeded. Default: `[]`, Maximum: 100 user IDs. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/110201) in GitLab 15.9. |
| `auto_ban_user_on_excessive_projects_download` **(ULTIMATE SELF)** | boolean | no | When enabled, users are automatically banned from the application when they download more than the maximum number of unique projects in the time period specified by `max_number_of_repository_downloads` and `max_number_of_repository_downloads_within_time_period` respectively. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/94153) in GitLab 15.4. |
| `mirror_available` | boolean | no | Allow repository mirroring to be configured by project Maintainers. If disabled, only Administrators can configure repository mirroring. |
| `mirror_capacity_threshold` **(PREMIUM)** | integer | no | Minimum capacity to be available before scheduling more mirrors preemptively. |

View File

@ -86,6 +86,8 @@ Parent-child relationships form the basis of **hierarchy** in work items. Each w
As types expand, and parent items have their own parent items, the hierarchy capability can grow exponentially.
[Pajamas](https://design.gitlab.com/objects/work-item#hierarchy) documents how to display hierarchies depending on context.
### Work Item view
The new frontend view that renders Work Items of any type using global Work Item `id` as an identifier.
@ -135,6 +137,7 @@ Work Item architecture is designed with making all the features for all the type
### Links
- [Work items in Pajamas Design System](https://design.gitlab.com/objects/work-item)
- [Work items initiative epic](https://gitlab.com/groups/gitlab-org/-/epics/6033)
- [Tasks roadmap](https://gitlab.com/groups/gitlab-org/-/epics/7103?_gl=1*zqatx*_ga*NzUyOTc3NTc1LjE2NjEzNDcwMDQ.*_ga_ENFH3X7M5Y*MTY2MjU0MDQ0MC43LjEuMTY2MjU0MDc2MC4wLjAuMA..)
- [Work Item "Vision" Prototype](https://gitlab.com/gitlab-org/gitlab/-/issues/368607)

View File

@ -26,14 +26,6 @@ Throughout this guide you will see references to CE and EE for abbreviation.
To get an overview of GitLab community membership, including those that would review or merge
your contributions, visit [the community roles page](community_roles.md).
## Security vulnerability disclosure
Report suspected security vulnerabilities by following the
[disclosure process on the GitLab.com website](https://about.gitlab.com/security/disclosure/).
WARNING:
Do **not** create publicly viewable issues for suspected security vulnerabilities.
## Code of conduct
We want to create a welcoming environment for everyone who is interested in contributing.
@ -73,13 +65,6 @@ within the MR.
GitLab values the time spent by contributors on reporting bugs. However, if a bug remains inactive for a very long period,
it will qualify for auto-closure. Please refer to the [auto-close inactive bugs](https://about.gitlab.com/handbook/engineering/quality/triage-operations/#auto-close-inactive-bugs) section in our handbook to understand the complete workflow.
## Helping others
Help other GitLab users when you can.
The methods people use to seek help can be found on the [getting help page](https://about.gitlab.com/get-help/).
Sign up for the mailing list, answer GitLab questions on StackOverflow or respond in the IRC channel.
## How to contribute
If you would like to contribute to GitLab:
@ -94,26 +79,6 @@ If you would like to contribute to GitLab:
could speed them up.
- Consult the [Contribution Flow](#contribution-flow) section to learn the process.
### Communication channels
If you have any questions or need help, visit [Getting Help](https://about.gitlab.com/get-help/) to learn how to
communicate with the GitLab community. GitLab prefers [asynchronous communication](https://about.gitlab.com/handbook/communication/#internal-communication) over real-time communication.
We do encourage you to connect and hang out with us. GitLab has a Gitter room dedicated for [contributors](https://gitter.im/gitlab/contributors), which is bridged with our
internal Slack. We actively monitor this channel. There is also a community-run [Discord server](https://discord.com/invite/gitlab) where you can
find other contributors in the `#contributors` channel.
Thanks for your contribution!
### GitLab Development Kit
The GitLab Development Kit (GDK) helps contributors run a local GitLab instance with all the
required dependencies. It can be used to test changes to GitLab and related projects before raising
a Merge Request.
For more information, see the [`gitlab-development-kit`](https://gitlab.com/gitlab-org/gitlab-development-kit)
project.
### Contribution flow
The general flow of contributing to GitLab is:
@ -194,7 +159,6 @@ If you are not sure who to mention, the reviewer will do this for you early in t
This [documentation](issue_workflow.md) outlines the current issue workflow:
- [Issue tracker guidelines](issue_workflow.md#issue-tracker-guidelines)
- [Issue triaging](issue_workflow.md#issue-triaging)
- [Labels](issue_workflow.md#labels)
- [Feature proposals](issue_workflow.md#feature-proposals)
@ -230,3 +194,8 @@ For information on how to contribute documentation, see GitLab
If you need a license for contributing to an EE-feature, see
[relevant information](https://about.gitlab.com/handbook/marketing/community-relations/code-contributor-program/operations/#contributing-to-the-gitlab-enterprise-edition-ee).
## Finding help
- [Get help](https://about.gitlab.com/get-help/).
- Join the community-run [Discord server](https://discord.com/invite/gitlab) and find other contributors in the `#contribute` channel.

View File

@ -7,15 +7,19 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Issues workflow
## Issue tracker guidelines
**Before you submit an issue, [search the issue tracker](https://gitlab.com/gitlab-org/gitlab/-/issues)**
for similar entries. Someone else might have already had the same bug or feature proposal.
If you find an existing issue, show your support with an award emoji and add your notes to the discussion.
**[Search the issue tracker](https://gitlab.com/gitlab-org/gitlab/-/issues)** for similar entries before
submitting your own, there's a good chance somebody else had the same issue or
feature proposal. Show your support with an award emoji and/or join the
discussion.
To submit a bug:
Please submit bugs using the ['Bug' issue template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/Bug.md) provided on the issue tracker.
The text in the comments (`<!-- ... -->`) is there to help you with what to include.
- Use the ['Bug' issue template](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/Bug.md).
The text in the comments (`<!-- ... -->`) should help you with which information to include.
- To report a suspected security vulnerability, follow the
[disclosure process on the GitLab.com website](https://about.gitlab.com/security/disclosure/).
WARNING:
Do **not** create publicly viewable issues for suspected security vulnerabilities.
## Issue triaging

View File

@ -11,38 +11,11 @@ description: "Development Guidelines: learn how to contribute to GitLab."
Learn how to contribute to the development of the GitLab product.
This content is intended for members of the GitLab Team as well as community
contributors. Content specific to the GitLab Team should instead be included in
the [Handbook](https://about.gitlab.com/handbook/).
This content is intended for GitLab team members as well as members of the wider community.
For information on using GitLab to work on your own software projects, see the
[GitLab user documentation](../user/index.md).
For information on working with the GitLab APIs, see the [API documentation](../api/rest/index.md).
For information about how to install, configure, update, and upgrade your own
GitLab instance, see the [Administrator documentation](../administration/index.md).
## Get started
- Set up the GitLab development environment with the
[GitLab Development Kit (GDK)](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/main/README.md)
- [GitLab contributing guide](contributing/index.md)
- [Issues workflow](contributing/issue_workflow.md) for more information about:
- Issue tracker guidelines.
- Triaging.
- Labels.
- Feature proposals.
- Issue weight.
- Regression issues.
- Technical or UX debt.
- [Merge requests workflow](contributing/merge_request_workflow.md) for more
information about:
- Merge request guidelines.
- Contribution acceptance criteria.
- Definition of done.
- Dependencies.
- [Style guides](contributing/style_guides.md)
- [Implement design & UI elements](contributing/design.md)
- [GitLab Architecture Overview](architecture.md)
- [Rake tasks](rake_tasks.md) for development
- [Contribute to GitLab development](contributing/index.md)
- [Contribute to Omnibus development](https://docs.gitlab.com/omnibus/development/)
- [Contribute to GitLab Pages development](pages/index.md)
- [Contribute to GitLab Runner development](https://docs.gitlab.com/runner/development/)
- [Contribute to Helm chart development](https://docs.gitlab.com/charts/development/)
- [Contribute to the GitLab documentation](documentation/index.md)

View File

@ -0,0 +1,153 @@
---
stage: Package
group: Harbor Registry
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Harbor Registry
## Enable Harbor Registry
To enable the Harbor Registry, you must configure the Harbor integration for your group or project.
The Harbor configuration requires four fields: `url`, `project_name`, `username` and `password`.
| Field | Description |
| --- | --- |
| `url` | The URL of the Harbor instance. |
| `project_name` | The project name of the Harbor instance. |
| `username` | The username used to log in to the Harbor instance. |
| `password` | The password used to log in to the Harbor instance. |
You can use [GitLab CI/CD predefined variables](../../ci/variables/index.md) along with the following Harbor Registry variables to request data from the Harbor instance.
| Variable | Description |
| --- | --- |
| `HARBOR_URL` | The URL of the Harbor instance. |
| `HARBOR_HOST` | The host of the Harbor instance URL. |
| `HARBOR_OCI` | The OCI URL of the Harbor instance URL. |
| `HARBOR_PROJECT` | The project name of the Harbor instance. |
| `HARBOR_USERNAME` | The username used to log in to the Harbor instance. |
| `HARBOR_PASSWORD` | The password used to log in to the Harbor instance. |
### Test settings
When testing the settings, a request is sent to `/api/v2.0/ping` of the Harbor instance. A successful test returns status code `200`. This test is primarily to verify that the Harbor instance is configured correctly. It doesn't verify that the `username` and `password` are correct.
## Code structure
```shell
app/controllers/concerns/harbor
├── access.rb
├── artifact.rb
├── repository.rb
└── tag.rb
app/controllers/projects/harbor
├── application_controller.rb
├── artifacts_controller.rb
├── repositories_controller.rb
└── tags_controller.rb
app/controllers/groups/harbor
├── application_controller.rb
├── artifacts_controller.rb
├── repositories_controller.rb
└── tags_controller.rb
app/models/integrations/harbor.rb
app/serializers/integrations/harbor_serializers
├── artifact_entity.rb
├── artifact_serializer.rb
├── repository_entity.rb
├── repository_serializer.rb
├── tag_entity.rb
└── tag_serializer.rb
lib/gitlab/harbor
├── client.rb
└── query.rb
```
The controllers under `app/controllers/projects/harbor` and `app/controllers/groups/harbor` provide the API interface for front-end calls.
The modules under `app/controllers/concerns/harbor` provide some common methods used by controllers.
The Harbor integration model is under `app/models/integrations`, and it contains some configuration information for Harbor integration.
The serializers under `app/serializers/integrations/harbor_serializers` are used by the controllers under `app/controllers/projects/harbor` and `app/controllers/groups/harbor`, and they help controllers to serialize the JSON data in the response.
The `lib/gitlab/harbor` directory contains the Harbor client, which sends API requests to the Harbor instances to retrieve data.
## Sequence diagram
```mermaid
sequenceDiagram
Client->>+GitLab: Request Harbor Registry
GitLab->>+Harbor instance: Request repositories data via API
Harbor instance->>+GitLab: Repositories data
GitLab->>+Client: Return repositories data
Client->>+GitLab: Request Harbor Registry artifacts
GitLab->>+Harbor instance: Request artifacts data via API
Harbor instance->>+GitLab: Artifacts data
GitLab->>+Client: Return artifacts data
Client->>+GitLab: Request Harbor Registry tags
GitLab->>+Harbor instance: Request tags data via API
Harbor instance->>+GitLab: Tags data
GitLab->>+Client: Return tags data
```
## Policy
The `read_harbor_registry` policy for groups and projects controls whether users have access to the Harbor Registry.
This policy is enabled for every user with the Reporter role and above.
## Frontend Development
The relevant front-end code is located in the `app/assets/javascripts/packages_and_registries/harbor_registry/` directory. The file structure is as follows:
```shell
├── components
│ ├── details
│ │ ├── artifacts_list_row.vue
│ │ ├── artifacts_list.vue
│ │ └── details_header.vue
│ ├── list
│ │ ├── harbor_list_header.vue
│ │ ├── harbor_list_row.vue
│ │ └── harbor_list.vue
│ ├── tags
│ │ ├── tags_header.vue
│ │ ├── tags_list_row.vue
│ │ └── tags_list.vue
│ └── harbor_registry_breadcrumb.vue
├── constants
│ ├── common.js
│ ├── details.js
│ ├── index.js
│ └── list.js
├── pages
│ ├── details.vue
│ ├── harbor_tags.vue
│ ├── index.vue
│ └── list.vue
├── index.js
├── router.js
└── utils.js
```
NOTE:
You can check out this [discussion](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/82777#note_1017875324) to see why we use the REST API instead of GraphQL.
The file `harbor_registry/pages/index.vue` only contains a single Vue router-view component, which navigates to the `images list`, `image detail`, and `tags list` pages via `router.js`.
Because `registry_breadcrumb.vue` component does not support multi-level paths, we have reimplemented the `harbor_registry/components/harbor_registry_breadcrumb.vue` component.
A multi-level breadcrumb component can be generated by passing a path array to `harbor_registry_breadcrumb.vue`.
```javascript
const routeNameList = [];
const hrefList = [];
this.breadCrumbState.updateName(nameList);
this.breadCrumbState.updateHref(hrefList);
```

View File

@ -37,3 +37,9 @@ Development and architectural documentation for the container registry
- [Settings](settings.md)
- [Structure / Schema](structure.md)
- [Cleanup policies](cleanup_policies.md)
## Harbor registry development
Development and architectural documentation for the harbor registry
- [Development documentation](harbor_registry_development.md)

View File

@ -275,94 +275,3 @@ qa:selectors-as-if-foss:
extends:
- .qa:rules:as-if-foss
```
### Extend the `.fast-no-clone-job` job
Downloading the branch for the canonical project takes between 20 and 30 seconds.
Some jobs only need a limited number of files, which we can download via the `raw` API, e.g. `https://gitlab.com/gitlab-org/gitlab/raw/master/VERSION`.
You can skip a job `git clone`/`git fetch` by adding the following pattern to a job:
```yaml
# Scenario 1: no before_script is defined in the job
#
# You can just extend the .fast-no-clone-job
extends:
- .fast-no-clone-job
variables:
FILES_TO_DOWNLOAD: >
scripts/rspec_helpers.sh
scripts/slack
```
```yaml
# Scenario 2: a before_script block is already defined in the job
#
# You will have to include the .fast-no-clone-job via a !reference as well
extends:
- .fast-no-clone-job
variables:
FILES_TO_DOWNLOAD: >
scripts/rspec_helpers.sh
scripts/slack
before_script:
- !reference [".fast-no-clone-job", before_script]
- [...]
```
- The job will set the `GIT_STRATEGY` to `none`.
- The files are downloaded from:
- The current project, on the current `CI_COMMIT_SHA` if the project is **public**
- The canonical project, on the `master` branch if the project **isn't public**
Below is an example on how to convert a job using this pattern:
```yaml
# Before
my-job:
image: ruby
stage: prepare
script: # This job requires two files to function
- source ./scripts/rspec_helpers.sh
- source ./scripts/slack
- echo "The files were successfully sourced!"
# After
my-job:
extends:
- .fast-no-clone-job
image: ruby
stage: prepare
variables:
FILES_TO_DOWNLOAD: >
scripts/rspec_helpers.sh
scripts/slack
script: # This job requires two files to function
- source ./scripts/rspec_helpers.sh
- source ./scripts/slack
- echo "The files were successfully sourced!"
```
#### Caveats
- This pattern does not work if a script relies on `git` to access the repository, because we don't have the repository without cloning or fetching.
- Given that we do not require setting up any API tokens to make this work, **we cannot download the files from any private repository**.
In that case, we will attempt to download the files from [https://gitlab.com/gitlab-org/gitlab](https://gitlab.com/gitlab-org/gitlab), which is a public repository.
For this reason, changes made in the private repository do not take effect for the downloaded files,
and older builds can break if those files are changed in a non-backward-compatible way.
Do not use this pattern for jobs which might block pipelines in private repositories like `security` or `dev`.
#### Where is this pattern used?
- For now, we use this pattern for the following jobs, and those do not block private repositories:
- `review-build-cng-env` for:
- `scripts/trigger-build.rb`
- `review-deploy` for:
- `scripts/review_apps/review-apps.sh`
- `scripts/review_apps/seed-dast-test-data.sh`
- `GITLAB_SHELL_VERSION`
- `GITALY_SERVER_VERSION`
- `GITLAB_WORKHORSE_VERSION`
Additionally, `scripts/utils.sh` will always be downloaded from the API when this pattern is used (this file contains the code for `.fast-no-clone-job`).

View File

@ -47,6 +47,16 @@ If you want to preserve this functionality, you can follow one of these two path
## Removed in 15.7
### File Type variable expansion in `.gitlab-ci.yml`
WARNING:
This is a [breaking change](https://docs.gitlab.com/ee/development/deprecation_guidelines/).
Review the details carefully before upgrading.
Prior to this change, variables that referenced or applied alias file variables expanded to the value of the `File` type variable (for example, the file contents). This behavior was incorrect because it did not comply with typical shell variable expansion rules. A user could run an `echo` command with the variable as an input parameter to leak secrets or sensitive information stored in `File` type variables.
In 15.7, we are removing file type variable expansion from GitLab. It is essential to check your CI pipelines to confirm if your scripts reference a file variable. If your CI job relies on the previous expansion functionality, that CI job will not work and generate an error as of 15.7. The new behavior is that variable expansion that reference or apply alias file variables expand to the file name or path of the `File` type variable, instead of its value, such as the file contents.
### Flowdock integration
As of December 22, 2022, we are removing the Flowdock integration because the service was shut down on August 15, 2022.

View File

@ -80,6 +80,11 @@ To migrate groups by direct transfer:
[enabled in application settings](../../admin_area/settings/visibility_and_access_controls.md#enable-migration-of-groups-and-projects-by-direct-transfer)
by an instance administrator.
- The source GitLab instance must be running GitLab 14.0 or later.
- You must have a [personal access token](../../../user/profile/personal_access_tokens.md) for the source GitLab
instance:
- For GitLab 15.1 and later source instances, the personal access token must have the `api` scope.
- For GitLab 15.0 and earlier source instances, the personal access token must have both the `api` and
`read_repository` scopes.
- You must have the Owner role on the source group to migrate from.
- You must have at least the Maintainer role on the destination group to migrate to. Using the Developer role for this
purpose was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/387891) in GitLab 15.8 and will be removed in
@ -111,10 +116,6 @@ Create the group you want to import to and connect the source GitLab instance:
- Select **New subgroup**.
- On the top bar, Select **{plus-square}** and then **New subgroup**. Then on the left sidebar, select the **import an existing group** link.
1. Enter the URL of a GitLab instance running GitLab 14.0 or later.
1. Generate or copy a [personal access token](../../../user/profile/personal_access_tokens.md) on your source GitLab
instance with:
- The `api` scope, if the source GitLab instance on version 15.1 or later.
- Both `api` and `read_repository` scopes, if the source GitLab instance on version 15.0 or earlier.
1. Enter the [personal access token](../../../user/profile/personal_access_tokens.md) for your source GitLab instance.
1. Select **Connect instance**.

View File

@ -145,6 +145,38 @@ To view the **Branch rules overview** list:
## Troubleshooting
### Multiple branches containing the same commit
At a deeper technical level, Git branches aren't separate entities, but labels
attached to a set of commit SHAs. When GitLab determines whether or not a branch has been
merged, it checks the target branch for the existence of those commit SHAs.
This behavior can cause unexpected results when two merge requests contain the same
commits. In this example, branches `B` and `C` both start from the same commit (`c`)
on branch `A`:
```mermaid
gitGraph
commit id:"a"
branch "branch A"
commit id:"b"
commit id:"c" type: HIGHLIGHT
branch "branch B"
commit id:"d"
checkout "branch A"
branch "branch C"
commit id:"e"
checkout main
merge "branch B" id:"merges commits b, c, d"
```
If you merge branch `B`, branch `A` also appears as merged (without any action from you)
because all commits from branch `A` now appear in the target branch `main`. Branch `C`
remains unmerged, because commit `e` wasn't part of branch `A` or `B`.
Merge request `A` remains merged, even if you attempt to push new commits
to its branch. If any changes in merge request `A` remain unmerged (because they
weren't part of merge request `A`), open a new merge request for them.
### Error: ambiguous `HEAD` branch exists
In versions of Git earlier than 2.16.0, you could create a branch named `HEAD`.

View File

@ -111,6 +111,9 @@ You can download the source code that's stored in a repository.
- **Artifacts:**
Download the artifacts from the latest CI job.
The checksums of generated archives can change even if the repository itself doesn't
change. This can occur, for example, if Git or a third-party library that GitLab uses changes.
## Repository languages
For the default branch of each repository, GitLab determines which programming languages

View File

@ -35,6 +35,13 @@ is updated every 90 minutes.
If your namespace shows `'Not applicable.'`, push a commit to any project in the
namespace to recalculate the storage.
### Container Registry usage **(FREE SAAS)**
Container Registry usage is available only for GitLab.com. This feature requires a
[new version](https://about.gitlab.com/blog/2022/04/12/next-generation-container-registry/)
of the GitLab Container Registry. To learn about the proposed release for self-managed
installations, see [epic 5521](https://gitlab.com/groups/gitlab-org/-/epics/5521).
### Storage usage statistics
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68898) project-level graph in GitLab 14.4 [with a flag](../administration/feature_flags.md) named `project_storage_ui`. Disabled by default.

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Batched background migration that rebalances `partition_id` to fix
    # invalid records in production: rows written with the invalid value
    # 101 are rewritten to the valid value 100.
    class RebalancePartitionId < BatchedMigrationJob
      INVALID_PARTITION_ID = 101 # value carried by the broken rows
      VALID_PARTITION_ID = 100   # target value for repaired rows

      # Only rows that actually carry the invalid partition_id are batched;
      # everything else is left untouched.
      scope_to ->(relation) { relation.where(partition_id: INVALID_PARTITION_ID) }
      operation_name :update_all
      feature_category :continuous_integration

      # Processes one batch. Each sub-batch is repaired with a single bulk
      # UPDATE (`update_all` bypasses ActiveRecord validations/callbacks,
      # which is intentional for a data-fix migration).
      def perform
        each_sub_batch do |sub_batch|
          sub_batch.update_all(partition_id: VALID_PARTITION_ID)
        end
      end
    end
  end
end

View File

@ -5,16 +5,8 @@ module Gitlab
module Pipeline
module Chain
class CancelPendingPipelines < Chain::Base
include Chain::Helpers
BATCH_SIZE = 25
def perform!
if Feature.enabled?(:move_cancel_pending_pipelines_to_async, project)
::Ci::CancelRedundantPipelinesWorker.perform_async(pipeline.id)
else
::Ci::PipelineCreation::CancelRedundantPipelinesService.new(pipeline).execute
end
::Ci::CancelRedundantPipelinesWorker.perform_async(pipeline.id)
end
def break?

View File

@ -176,3 +176,5 @@ module Gitlab
end
end
end
Gitlab::ImportExport::Importer.prepend_mod_with('Gitlab::ImportExport::Importer')

View File

@ -5194,6 +5194,9 @@ msgstr ""
msgid "Approve"
msgstr ""
msgid "Approve All"
msgstr ""
msgid "Approve a merge request"
msgstr ""
@ -5284,6 +5287,9 @@ msgstr ""
msgid "Are you sure you want to approve %{user}?"
msgstr ""
msgid "Are you sure you want to approve all users?"
msgstr ""
msgid "Are you sure you want to attempt to merge?"
msgstr ""
@ -6630,12 +6636,18 @@ msgstr ""
msgid "Billing|Add seats"
msgstr ""
msgid "Billing|All members were successfully approved"
msgstr ""
msgid "Billing|An email address is only visible for users with public emails."
msgstr ""
msgid "Billing|An error occurred while approving %{user}"
msgstr ""
msgid "Billing|An error occurred while approving all members"
msgstr ""
msgid "Billing|An error occurred while getting a billable member details."
msgstr ""
@ -50295,6 +50307,9 @@ msgstr ""
msgid "example.com"
msgstr ""
msgid "exceeds maximum length (100 user ids)"
msgstr ""
msgid "exceeds maximum length (100 usernames)"
msgstr ""
@ -51368,6 +51383,9 @@ msgstr ""
msgid "should be an array of %{object_name} objects"
msgstr ""
msgid "should be an array of existing user ids. %{invalid} does not exist"
msgstr ""
msgid "should be an array of existing usernames. %{invalid} does not exist"
msgstr ""

View File

@ -22,7 +22,7 @@ module QA
element :copy_contents_button
end
base.view 'app/assets/javascripts/vue_shared/components/source_viewer/source_viewer.vue' do
base.view 'app/assets/javascripts/vue_shared/components/source_viewer/source_viewer_deprecated.vue' do
element :blob_viewer_file_content
end
end

View File

@ -287,20 +287,3 @@ function setup_gcloud() {
gcloud auth activate-service-account --key-file="${REVIEW_APPS_GCP_CREDENTIALS}"
gcloud config set project "${REVIEW_APPS_GCP_PROJECT}"
}
# Download one or more repository files via the raw API instead of a git clone.
# A public fork serves its own files at the current commit; private forks fall
# back to the canonical gitlab-org/gitlab master branch (raw API needs no token
# there).
function download_files() {
  local base_url
  if [[ "${CI_PROJECT_VISIBILITY}" == "public" ]]; then
    base_url="${CI_PROJECT_URL}/raw/${CI_COMMIT_SHA}"
  else
    base_url="https://gitlab.com/gitlab-org/gitlab/raw/master"
  fi

  # Fetch each requested file sequentially, creating parent directories as needed.
  local file
  for file in "$@"; do
    echo "Downloading file: ${base_url}/${file}"
    curl "${base_url}/${file}" --create-dirs --output "${file}"
  done
}

View File

@ -124,6 +124,38 @@ RSpec.describe Projects::ArtifactsController do
end
end
# Private artifacts must be hidden from users without project access: the
# endpoint responds 404 (not 403) so their existence is not disclosed.
context 'when artifact is set as private' do
  let(:filename) { job.artifacts_file.filename }

  before do
    job.job_artifacts.update_all(accessibility: 'private')
  end

  context 'and user is not authorized' do
    let(:user) { create(:user) }

    it 'returns not found' do
      download_artifact(file_type: 'archive')

      expect(response).to have_gitlab_http_status(:not_found)
    end
  end

  context 'and user has access to project' do
    it 'downloads' do
      expect(controller).to receive(:send_file)
        .with(
          job.artifacts_file.file.path,
          hash_including(disposition: 'attachment', filename: filename)).and_call_original

      download_artifact(file_type: 'archive')

      expect(response).to have_gitlab_http_status(:ok)
      expect(response.headers['Content-Disposition']).to eq(%Q(attachment; filename="#{filename}"; filename*=UTF-8''#{filename}))
    end
  end
end
context 'when a file type is supplied' do
context 'when an invalid file type is supplied' do
let(:file_type) { 'invalid' }

View File

@ -1,101 +0,0 @@
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { setHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { select2AxiosTransport } from '~/lib/utils/select2_utils';
import 'select2/select2';

// Fixed inputs shared by every example below.
const TEST_URL = '/test/api/url';
const TEST_SEARCH_DATA = { extraSearch: 'test' };
const TEST_DATA = [{ id: 1 }];
const TEST_SEARCH = 'FOO';

// Specs for the select2 axios transport adapter: it should route select2's
// AJAX requests through our axios instance and translate response headers
// into select2 pagination info.
describe('lib/utils/select2_utils', () => {
  let mock;
  let resultsSpy;

  beforeEach(() => {
    // A bare <input> is enough for select2 to attach itself to.
    setHTMLFixture('<div><input id="root" /></div>');

    mock = new MockAdapter(axios);
    resultsSpy = jest.fn().mockReturnValue({ results: [] });
  });

  afterEach(() => {
    mock.restore();
  });

  // Initializes select2 on `input` with the transport under test.
  const setupSelect2 = (input) => {
    input.select2({
      ajax: {
        url: TEST_URL,
        quietMillis: 250,
        transport: select2AxiosTransport,
        data(search, page) {
          return {
            search,
            page,
            ...TEST_SEARCH_DATA,
          };
        },
        results: resultsSpy,
      },
    });
  };

  // Triggers a search and flushes select2's debounce timer plus pending promises.
  const setupSelect2AndSearch = async () => {
    const $input = $('#root');

    setupSelect2($input);

    $input.select2('search', TEST_SEARCH);

    jest.runOnlyPendingTimers();
    await waitForPromises();
  };

  describe('select2AxiosTransport', () => {
    it('uses axios to make request', async () => {
      // setup mock response
      const replySpy = jest.fn();
      mock.onGet(TEST_URL).reply((...args) => replySpy(...args));

      await setupSelect2AndSearch();

      // The transport must forward URL, method and the merged query params.
      expect(replySpy).toHaveBeenCalledWith(
        expect.objectContaining({
          url: TEST_URL,
          method: 'get',
          params: {
            page: 1,
            search: TEST_SEARCH,
            ...TEST_SEARCH_DATA,
          },
        }),
      );
    });

    // Pagination is derived from response headers: a next-page header means
    // select2 should be told there are more results to fetch.
    it.each`
      headers                                | pagination
      ${{}}                                  | ${{ more: false }}
      ${{ 'X-PAGE': '1', 'x-next-page': 2 }} | ${{ more: true }}
    `(
      'passes results and pagination to results callback, with headers=$headers',
      async ({ headers, pagination }) => {
        mock.onGet(TEST_URL).reply(HTTP_STATUS_OK, TEST_DATA, headers);

        await setupSelect2AndSearch();

        expect(resultsSpy).toHaveBeenCalledWith(
          { results: TEST_DATA, pagination },
          1,
          expect.anything(),
        );
      },
    );
  });
});

View File

@ -16,7 +16,7 @@ import ForkSuggestion from '~/repository/components/fork_suggestion.vue';
import { loadViewer } from '~/repository/components/blob_viewers';
import DownloadViewer from '~/repository/components/blob_viewers/download_viewer.vue';
import EmptyViewer from '~/repository/components/blob_viewers/empty_viewer.vue';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer_deprecated.vue';
import blobInfoQuery from 'shared_queries/repository/blob_info.query.graphql';
import projectInfoQuery from '~/repository/queries/project_info.query.graphql';
import userInfoQuery from '~/repository/queries/user_info.query.graphql';

View File

@ -1,7 +1,7 @@
import { nextTick } from 'vue';
import { GlIntersectionObserver } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk.vue';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk_deprecated.vue';
import ChunkLine from '~/vue_shared/components/source_viewer/components/chunk_line.vue';
import LineHighlighter from '~/blob/line_highlighter';

View File

@ -2,9 +2,9 @@ import hljs from 'highlight.js/lib/core';
import Vue from 'vue';
import VueRouter from 'vue-router';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer.vue';
import SourceViewer from '~/vue_shared/components/source_viewer/source_viewer_deprecated.vue';
import { registerPlugins } from '~/vue_shared/components/source_viewer/plugins/index';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk.vue';
import Chunk from '~/vue_shared/components/source_viewer/components/chunk_deprecated.vue';
import {
EVENT_ACTION,
EVENT_LABEL_VIEWER,

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::RebalancePartitionId,
  :migration,
  schema: 20230125093723,
  feature_category: :continuous_integration do
  # Removed the unused `ci_builds_table` let — only ci_pipelines is exercised.
  let(:ci_pipelines_table) { table(:ci_pipelines, database: :ci) }

  # One row with the valid partition_id (100) and one with the invalid
  # value (101) that the migration must repair.
  let!(:valid_ci_pipeline) { ci_pipelines_table.create!(id: 1, partition_id: 100) }
  let!(:invalid_ci_pipeline) { ci_pipelines_table.create!(id: 2, partition_id: 101) }

  describe '#perform' do
    using RSpec::Parameterized::TableSyntax

    where(:table_name, :invalid_record, :valid_record) do
      :ci_pipelines | invalid_ci_pipeline | valid_ci_pipeline
    end

    subject(:perform) do
      described_class.new(
        start_id: 1,
        end_id: 2,
        batch_table: table_name,
        batch_column: :id,
        sub_batch_size: 1,
        pause_ms: 0,
        connection: Ci::ApplicationRecord.connection
      ).perform
    end

    shared_examples 'fix invalid records' do
      it 'rebalances partition_id to 100 when partition_id is 101' do
        expect { perform }
          .to change { invalid_record.reload.partition_id }.from(101).to(100)
          .and not_change { valid_record.reload.partition_id }
      end
    end

    with_them do
      it_behaves_like 'fix invalid records'
    end
  end
end

View File

@ -5,225 +5,17 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines, feature_category: :continuous_integration do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let(:prev_pipeline) { create(:ci_pipeline, project: project) }
let(:new_commit) { create(:commit, project: project) }
let(:pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
end
let(:step) { described_class.new(pipeline, command) }
before do
stub_feature_flags(move_cancel_pending_pipelines_to_async: false)
create(:ci_build, :interruptible, :running, pipeline: prev_pipeline)
create(:ci_build, :interruptible, :success, pipeline: prev_pipeline)
create(:ci_build, :created, pipeline: prev_pipeline)
create(:ci_build, :interruptible, pipeline: pipeline)
end
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:command) { Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user) }
let_it_be(:step) { described_class.new(pipeline, command) }
describe '#perform!' do
subject(:perform) { step.perform! }
before do
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
expect(build_statuses(pipeline)).to contain_exactly('pending')
it 'enqueues CancelRedundantPipelinesWorker' do
expect(Ci::CancelRedundantPipelinesWorker).to receive(:perform_async).with(pipeline.id)
subject
end
context 'when auto-cancel is enabled' do
before do
project.update!(auto_cancel_pending_pipelines: 'enabled')
end
it 'cancels only previous interruptible builds' do
perform
expect(build_statuses(prev_pipeline)).to contain_exactly('canceled', 'success', 'canceled')
expect(build_statuses(pipeline)).to contain_exactly('pending')
end
it 'logs canceled pipelines' do
allow(Gitlab::AppLogger).to receive(:info)
perform
expect(Gitlab::AppLogger).to have_received(:info).with(
class: "Ci::PipelineCreation::CancelRedundantPipelinesService",
message: "Pipeline #{pipeline.id} auto-canceling pipeline #{prev_pipeline.id}",
canceled_pipeline_id: prev_pipeline.id,
canceled_by_pipeline_id: pipeline.id,
canceled_by_pipeline_source: pipeline.source
)
end
it 'cancels the builds with 2 queries to avoid query timeout' do
second_query_regex = /WHERE "ci_pipelines"\."id" = \d+ AND \(NOT EXISTS/
recorder = ActiveRecord::QueryRecorder.new { perform }
second_query = recorder.occurrences.keys.filter { |occ| occ =~ second_query_regex }
expect(second_query).to be_one
end
context 'when the previous pipeline has a child pipeline' do
let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
context 'when the child pipeline has interruptible running jobs' do
before do
create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
end
it 'cancels all child pipeline builds' do
expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running')
perform
expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
end
context 'when the child pipeline includes completed interruptible jobs' do
before do
create(:ci_build, :interruptible, :failed, pipeline: child_pipeline)
create(:ci_build, :interruptible, :success, pipeline: child_pipeline)
end
it 'cancels all child pipeline builds with a cancelable_status' do
expect(build_statuses(child_pipeline)).to contain_exactly('running', 'running', 'failed', 'success')
perform
expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled', 'failed', 'success')
end
end
end
context 'when the child pipeline has started non-interruptible job' do
before do
create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
# non-interruptible started
create(:ci_build, :success, pipeline: child_pipeline)
end
it 'does not cancel any child pipeline builds' do
expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
perform
expect(build_statuses(child_pipeline)).to contain_exactly('running', 'success')
end
end
context 'when the child pipeline has non-interruptible non-started job' do
before do
create(:ci_build, :interruptible, :running, pipeline: child_pipeline)
end
not_started_statuses = Ci::HasStatus::AVAILABLE_STATUSES - Ci::HasStatus::STARTED_STATUSES
context 'when the jobs are cancelable' do
cancelable_not_started_statuses = Set.new(not_started_statuses).intersection(Ci::HasStatus::CANCELABLE_STATUSES)
cancelable_not_started_statuses.each do |status|
it "cancels all child pipeline builds when build status #{status} included" do
# non-interruptible but non-started
create(:ci_build, status.to_sym, pipeline: child_pipeline)
expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
perform
expect(build_statuses(child_pipeline)).to contain_exactly('canceled', 'canceled')
end
end
end
context 'when the jobs are not cancelable' do
not_cancelable_not_started_statuses = not_started_statuses - Ci::HasStatus::CANCELABLE_STATUSES
not_cancelable_not_started_statuses.each do |status|
it "does not cancel child pipeline builds when build status #{status} included" do
# non-interruptible but non-started
create(:ci_build, status.to_sym, pipeline: child_pipeline)
expect(build_statuses(child_pipeline)).to contain_exactly('running', status)
perform
expect(build_statuses(child_pipeline)).to contain_exactly('canceled', status)
end
end
end
end
end
context 'when the pipeline is a child pipeline' do
let!(:parent_pipeline) { create(:ci_pipeline, project: project, sha: new_commit.sha) }
let(:pipeline) { create(:ci_pipeline, child_of: parent_pipeline) }
before do
create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
create(:ci_build, :interruptible, :running, pipeline: parent_pipeline)
end
it 'does not cancel any builds' do
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
perform
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
expect(build_statuses(parent_pipeline)).to contain_exactly('running', 'running')
end
end
context 'when the previous pipeline source is webide' do
let(:prev_pipeline) { create(:ci_pipeline, :webide, project: project) }
it 'does not cancel builds of the previous pipeline' do
perform
expect(build_statuses(prev_pipeline)).to contain_exactly('created', 'running', 'success')
expect(build_statuses(pipeline)).to contain_exactly('pending')
end
end
end
context 'when auto-cancel is disabled' do
before do
project.update!(auto_cancel_pending_pipelines: 'disabled')
end
it 'does not cancel any build' do
subject
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
expect(build_statuses(pipeline)).to contain_exactly('pending')
end
end
context 'when feature flag move_cancel_pending_pipelines_to_async is enabled' do
before do
stub_feature_flags(move_cancel_pending_pipelines_to_async: true)
end
it 'enqueues CancelRedundantPipelinesWorker' do
expect(Ci::CancelRedundantPipelinesWorker).to receive(:perform_async).with(pipeline.id)
subject
end
it 'does not do any synchronous processing' do
subject
expect(build_statuses(prev_pipeline)).to contain_exactly('running', 'success', 'created')
expect(build_statuses(pipeline)).to contain_exactly('pending')
end
end
end
private
# Returns the status strings of every build in the given pipeline, read
# straight from the database via pluck (no ActiveRecord objects instantiated).
def build_statuses(pipeline)
pipeline.builds.pluck(:status)
end
end

View File

@ -6,8 +6,8 @@ RSpec.describe Gitlab::ImportExport::Importer do
let(:user) { create(:user) }
let(:test_path) { "#{Dir.tmpdir}/importer_spec" }
let(:shared) { project.import_export_shared }
let(:project) { create(:project) }
let(:import_file) { fixture_file_upload('spec/features/projects/import_export/test_project_export.tar.gz') }
let_it_be(:project) { create(:project) }
subject(:importer) { described_class.new(project) }

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

# Verifies that the RebalancePartitionIdCiPipeline migration schedules the
# batched background migration only on GitLab.com (SaaS) and is a no-op on
# self-managed instances.
RSpec.describe RebalancePartitionIdCiPipeline, migration: :gitlab_ci, feature_category: :continuous_integration do
  let(:migration) { described_class::MIGRATION }

  # Typo fixed: 'sass' -> 'SaaS' (this context stubs Gitlab.com? => true).
  context 'when on SaaS' do
    before do
      allow(Gitlab).to receive(:com?).and_return(true)
    end

    describe '#up' do
      # Description corrected: the migration batches ci_pipelines, not ci_builds.
      it 'schedules background jobs for each batch of ci_pipelines' do
        migrate!

        expect(migration).to have_scheduled_batched_migration(
          gitlab_schema: :gitlab_ci,
          table_name: :ci_pipelines,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      end
    end

    describe '#down' do
      it 'deletes all batched migration records' do
        migrate!
        schema_migrate_down!

        expect(migration).not_to have_scheduled_batched_migration
      end
    end
  end

  # Without the Gitlab.com? stub, com? is falsy, so both directions must no-op.
  context 'when on self-managed instance' do
    let(:migration) { described_class.new }

    describe '#up' do
      it 'does not schedule background job' do
        expect(migration).not_to receive(:queue_batched_background_migration)

        migration.up
      end
    end

    describe '#down' do
      it 'does not delete background job' do
        expect(migration).not_to receive(:delete_batched_background_migration)

        migration.down
      end
    end
  end
end

View File

@ -2235,19 +2235,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
describe '#artifacts_file_for_type' do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
let(:file_type) { :archive }
subject { build.artifacts_file_for_type(file_type) }
it 'queries artifacts for type' do
expect(build).to receive_message_chain(:job_artifacts, :find_by).with(file_type: [Ci::JobArtifact.file_types[file_type]])
subject
end
end
describe '#merge_request' do
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
@ -5793,27 +5780,49 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
describe 'secrets management id_tokens usage data' do
context 'when ID tokens are defined' do
let(:ci_build) { FactoryBot.build(:ci_build, user: user, id_tokens: { 'ID_TOKEN_1' => { aud: 'developers' } }) }
context 'on create' do
it 'tracks event with user_id' do
let(:ci_build) { FactoryBot.build(:ci_build, user: user, id_tokens: { 'ID_TOKEN_1' => { aud: 'developers' } }) }
it 'tracks RedisHLL event with user_id' do
expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
.with('i_ci_secrets_management_id_tokens_build_created', values: user.id)
ci_build.save!
end
it 'tracks Snowplow event with RedisHLL context' do
params = {
category: described_class.to_s,
action: 'create_id_tokens',
namespace: ci_build.namespace,
user: user,
label: 'redis_hll_counters.ci_secrets_management.i_ci_secrets_management_id_tokens_build_created_monthly',
ultimate_namespace_id: ci_build.namespace.root_ancestor.id,
context: [Gitlab::Tracking::ServicePingContext.new(
data_source: :redis_hll,
event: 'i_ci_secrets_management_id_tokens_build_created'
).to_context.to_json]
}
ci_build.save!
expect_snowplow_event(**params)
end
end
context 'on update' do
before do
ci_build.save!
end
let_it_be(:ci_build) { create(:ci_build, user: user, id_tokens: { 'ID_TOKEN_1' => { aud: 'developers' } }) }
it 'does not track event' do
it 'does not track RedisHLL event' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
ci_build.success
end
it 'does not track Snowplow event' do
ci_build.success
expect_no_snowplow_event
end
end
end
@ -5821,11 +5830,16 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
let(:ci_build) { FactoryBot.build(:ci_build, user: user) }
context 'on create' do
it 'does not track event' do
it 'does not track RedisHLL event' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
ci_build.save!
end
it 'does not track Snowplow event' do
ci_build.save!
expect_no_snowplow_event
end
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe SystemNoteService do
RSpec.describe SystemNoteService, feature_category: :shared do
include Gitlab::Routing
include RepoHelpers
include AssetsHelpers