Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-12-06 00:21:31 +00:00
parent 97f7f0df56
commit 870454782f
112 changed files with 2218 additions and 287 deletions

View File

@ -8,7 +8,6 @@ RSpec/ExcessiveDocstringSpacing:
- 'ee/spec/finders/ee/issuables/label_filter_spec.rb'
- 'ee/spec/finders/ee/namespaces/projects_finder_spec.rb'
- 'ee/spec/graphql/mutations/ai/action_spec.rb'
- 'ee/spec/helpers/ee/members_helper_spec.rb'
- 'ee/spec/helpers/tree_helper_spec.rb'
- 'ee/spec/lib/ee/gitlab/ci/config/entry/bridge_spec.rb'
- 'ee/spec/lib/elastic/latest/git_class_proxy_spec.rb'

View File

@ -226,11 +226,11 @@
{"name":"gitlab-dangerfiles","version":"4.8.0","platform":"ruby","checksum":"b327d079552ec974a63bf34d749a0308425af6ebf51d01064f1a6ff216a523db"},
{"name":"gitlab-experiment","version":"0.9.1","platform":"ruby","checksum":"f230ee742154805a755d5f2539dc44d93cdff08c5bbbb7656018d61f93d01f48"},
{"name":"gitlab-fog-azure-rm","version":"2.2.0","platform":"ruby","checksum":"31aa7c2170f57874053144e7f716ec9e15f32e71ffbd2c56753dce46e2e78ba9"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"aarch64-linux","checksum":"e2bea2e58b4553fc908d9bf947beafb977a639868f68df52eec5b2a2036ebb9e"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"arm64-darwin","checksum":"85668cb0cb8e361e40682899fab76df1c623540cab561489f6a8e057a3cbaf8b"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"ruby","checksum":"cb960ac1bc509d72b460c9dc934fb0a02cf061a5de6b1b00c72b794817d63b40"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"x86_64-darwin","checksum":"8425ee27e0b32b75619e08e1700c1302297b44928adc19a026bea243c96363f5"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"x86_64-linux","checksum":"9ea7d7a7a20c15960839521459a82edab787a4d8475ee412beba8362aa5fcd71"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"aarch64-linux","checksum":"2debf90c2d7b03e282a88951ad39a8f9bfc1662be7329f6dbc66c56cf9f2c17a"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"arm64-darwin","checksum":"e1d5fe80b52263041c1e91d85b4fd9eb367c20e2bcda817132bfcf671ea1874a"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"ruby","checksum":"89a12909c39aea326adb0b7194f7b89d61b4f9122308435fba0bcb84e4f4ff24"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"x86_64-darwin","checksum":"4b77a37358d98c3b2269f7dd19f6549555c5de00bf12a4eca25c34076f72f78d"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"x86_64-linux","checksum":"2b71ec5ae06a524114e2cf423ce6635fd1f5c6776c0c956188aa0b2f0fbfbead"},
{"name":"gitlab-kas-grpc","version":"17.5.1","platform":"ruby","checksum":"88639bfaa9301d78a7fbff696ec262ed696a15a6f41c1b51bffe6b39c7a61ca7"},
{"name":"gitlab-labkit","version":"0.37.0","platform":"ruby","checksum":"d2dd0a60db2149a9a8eebf2975dc23f54ac3ceb01bdba732eb1b26b86dfffa70"},
{"name":"gitlab-license","version":"2.6.0","platform":"ruby","checksum":"2c1f8ae73835640ec77bf758c1d0c9730635043c01cf77902f7976e826d7d016"},

View File

@ -745,7 +745,7 @@ GEM
mime-types
net-http-persistent (~> 4.0)
nokogiri (~> 1, >= 1.10.8)
gitlab-glfm-markdown (0.0.21)
gitlab-glfm-markdown (0.0.23)
rb_sys (= 0.9.94)
gitlab-kas-grpc (17.5.1)
grpc (~> 1.0)

View File

@ -227,11 +227,11 @@
{"name":"gitlab-dangerfiles","version":"4.8.0","platform":"ruby","checksum":"b327d079552ec974a63bf34d749a0308425af6ebf51d01064f1a6ff216a523db"},
{"name":"gitlab-experiment","version":"0.9.1","platform":"ruby","checksum":"f230ee742154805a755d5f2539dc44d93cdff08c5bbbb7656018d61f93d01f48"},
{"name":"gitlab-fog-azure-rm","version":"2.2.0","platform":"ruby","checksum":"31aa7c2170f57874053144e7f716ec9e15f32e71ffbd2c56753dce46e2e78ba9"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"aarch64-linux","checksum":"e2bea2e58b4553fc908d9bf947beafb977a639868f68df52eec5b2a2036ebb9e"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"arm64-darwin","checksum":"85668cb0cb8e361e40682899fab76df1c623540cab561489f6a8e057a3cbaf8b"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"ruby","checksum":"cb960ac1bc509d72b460c9dc934fb0a02cf061a5de6b1b00c72b794817d63b40"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"x86_64-darwin","checksum":"8425ee27e0b32b75619e08e1700c1302297b44928adc19a026bea243c96363f5"},
{"name":"gitlab-glfm-markdown","version":"0.0.21","platform":"x86_64-linux","checksum":"9ea7d7a7a20c15960839521459a82edab787a4d8475ee412beba8362aa5fcd71"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"aarch64-linux","checksum":"2debf90c2d7b03e282a88951ad39a8f9bfc1662be7329f6dbc66c56cf9f2c17a"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"arm64-darwin","checksum":"e1d5fe80b52263041c1e91d85b4fd9eb367c20e2bcda817132bfcf671ea1874a"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"ruby","checksum":"89a12909c39aea326adb0b7194f7b89d61b4f9122308435fba0bcb84e4f4ff24"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"x86_64-darwin","checksum":"4b77a37358d98c3b2269f7dd19f6549555c5de00bf12a4eca25c34076f72f78d"},
{"name":"gitlab-glfm-markdown","version":"0.0.23","platform":"x86_64-linux","checksum":"2b71ec5ae06a524114e2cf423ce6635fd1f5c6776c0c956188aa0b2f0fbfbead"},
{"name":"gitlab-kas-grpc","version":"17.5.1","platform":"ruby","checksum":"88639bfaa9301d78a7fbff696ec262ed696a15a6f41c1b51bffe6b39c7a61ca7"},
{"name":"gitlab-labkit","version":"0.37.0","platform":"ruby","checksum":"d2dd0a60db2149a9a8eebf2975dc23f54ac3ceb01bdba732eb1b26b86dfffa70"},
{"name":"gitlab-license","version":"2.6.0","platform":"ruby","checksum":"2c1f8ae73835640ec77bf758c1d0c9730635043c01cf77902f7976e826d7d016"},

View File

@ -755,7 +755,7 @@ GEM
mime-types
net-http-persistent (~> 4.0)
nokogiri (~> 1, >= 1.10.8)
gitlab-glfm-markdown (0.0.21)
gitlab-glfm-markdown (0.0.23)
rb_sys (= 0.9.94)
gitlab-kas-grpc (17.5.1)
grpc (~> 1.0)

View File

@ -133,7 +133,7 @@ export default {
/>
<pre
:class="preClasses"
><code :class="{ 'gl-text-subtle gl-opacity-5': loading }">{{ query.trim() }}</code></pre>
><code :class="{ 'gl-opacity-5': loading }">{{ query.trim() }}</code></pre>
</div>
</div>
</template>

View File

@ -2,6 +2,7 @@
// eslint-disable-next-line no-restricted-imports
import { mapState, mapGetters } from 'vuex';
import { __ } from '~/locale';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import ScopeSidebarNavigation from '~/search/sidebar/components/scope_sidebar_navigation.vue';
import SidebarPortal from '~/super_sidebar/components/sidebar_portal.vue';
import { toggleSuperSidebarCollapsed } from '~/super_sidebar/super_sidebar_collapsed_state_manager';
@ -84,6 +85,11 @@ export default {
return this.currentScope === SCOPE_WIKI_BLOBS;
},
},
beforeCreate() {
if (!this.$store) {
Sentry.captureException('GlobalSearchSidebar was not provided a Vuex store');
}
},
methods: {
toggleFiltersFromSidebar() {
toggleSuperSidebarCollapsed();

View File

@ -0,0 +1,170 @@
<script>
// Global-search sidebar filter for merge requests: lets the user pick an
// author and either include (author=) or exclude (not[author]=) that author
// from the search query, mirroring the source-branch filter component.
// eslint-disable-next-line no-restricted-imports
import { mapActions, mapState } from 'vuex';
import { GlFormCheckbox, GlTooltipDirective } from '@gitlab/ui';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { s__ } from '~/locale';
import AjaxCache from '~/lib/utils/ajax_cache';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import FilterDropdown from '~/search/sidebar/components/shared/filter_dropdown.vue';
import {
SEARCH_ICON,
USER_ICON,
AUTHOR_ENDPOINT_PATH,
AUTHOR_PARAM,
NOT_AUTHOR_PARAM,
} from '../../constants';
export default {
name: 'AuthorFilter',
components: {
FilterDropdown,
GlFormCheckbox,
},
directives: {
GlTooltip: GlTooltipDirective,
},
data() {
return {
// Listbox items ({ text: name, value: username }) fetched from the API.
authors: [],
// Last fetch error message, shown by the dropdown; '' means no error.
error: '',
// true when the checkbox flips the filter to "author not included".
toggleState: false,
// Display name of the selected author (falls back to the username).
selectedAuthorName: '',
// Username of the selected author, as stored in the query params.
selectedAuthorValue: '',
isLoading: false,
searchTerm: '',
};
},
i18n: {
// NOTE(review): this tooltip copy mentions "source branch" but the
// component filters by author — looks copied from the source-branch
// filter; confirm the intended wording.
toggleTooltip: s__('GlobalSearch|Toggle if results have source branch included or excluded'),
author: s__('GlobalSearch|Author'),
search: s__('GlobalSearch|Search'),
authorNotIncluded: s__('GlobalSearch|Author not included'),
},
computed: {
...mapState(['groupInitialJson', 'projectInitialJson', 'query']),
showDropdownPlaceholderText() {
return this.selectedAuthorName ? this.selectedAuthorName : this.$options.i18n.search;
},
showDropdownPlaceholderIcon() {
return this.selectedAuthorName ? USER_ICON : SEARCH_ICON;
},
},
watch: {
// Once author data arrives, resolve the username restored from the URL
// into a display name, then re-apply the selection to the query state.
authors(newAuthors) {
if (newAuthors.length > 0 && this.selectedAuthorValue) {
this.selectedAuthorName = this.convertValueToName(this.selectedAuthorValue);
}
this.handleSelected(this.selectedAuthorValue);
},
},
mounted() {
// Restore any author selection (included or excluded) from the current
// query params; the toggle reflects which of the two params was set.
this.selectedAuthorValue = this.query?.[AUTHOR_PARAM] || this.query?.[NOT_AUTHOR_PARAM];
this.toggleState = Boolean(this.query?.[NOT_AUTHOR_PARAM]);
if (this.selectedAuthorValue) {
this.getCachedDropdownData();
}
},
methods: {
...mapActions(['setQuery', 'applyQuery']),
// Builds the autocomplete users endpoint, scoped to the current group or
// project when available and filtered by the current search term.
getDropdownAPIEndpoint() {
const endpoint = `${gon.relative_url_root || ''}${AUTHOR_ENDPOINT_PATH}`;
const params = {
current_user: true,
active: true,
group_id: this.groupInitialJson?.id || null,
project_id: this.projectInitialJson?.id || null,
search: this.searchTerm,
};
return mergeUrlParams(params, endpoint);
},
convertToListboxItems(data) {
return data.map((item) => ({
text: item.name,
value: item.username,
}));
},
// Fetches authors through AjaxCache so repeated opens of the dropdown
// with the same params do not re-hit the API. Errors are reported to
// Sentry and surfaced via `error`.
async getCachedDropdownData() {
this.isLoading = true;
try {
const data = await AjaxCache.retrieve(this.getDropdownAPIEndpoint());
this.error = '';
this.isLoading = false;
this.authors = this.convertToListboxItems(data);
} catch (error) {
Sentry.captureException(error);
this.isLoading = false;
this.error = error.message;
}
},
// Writes the selection into whichever query param matches the toggle:
// not[author] when excluding, author when including.
handleSelected(selectedAuthorValue) {
this.selectedAuthorName = this.convertValueToName(selectedAuthorValue);
this.selectedAuthorValue = selectedAuthorValue;
if (this.toggleState) {
this.setNotAuthorParam(selectedAuthorValue);
return;
}
this.setAuthorParam(selectedAuthorValue);
},
// The two params are mutually exclusive; setting one clears the other.
setAuthorParam(selectedAuthorValue) {
this.setQuery({ key: AUTHOR_PARAM, value: selectedAuthorValue });
this.setQuery({ key: NOT_AUTHOR_PARAM, value: '' });
},
setNotAuthorParam(selectedAuthorValue) {
this.setQuery({ key: NOT_AUTHOR_PARAM, value: selectedAuthorValue });
this.setQuery({ key: AUTHOR_PARAM, value: '' });
},
// Maps a username back to its display name; falls back to the raw value
// when the author list has not loaded (e.g. right after page load).
convertValueToName(selectedAuthorValue) {
const authorObj = this.authors.find((item) => item.value === selectedAuthorValue);
return authorObj?.text || selectedAuthorValue;
},
// Checkbox handler: re-applies the current selection under the other param.
changeCheckboxInput(state) {
this.toggleState = state;
this.handleSelected(this.selectedAuthorValue);
},
handleSearch(searchTerm) {
this.searchTerm = searchTerm;
this.getCachedDropdownData();
},
// Clears both author params and immediately re-runs the search.
handleReset() {
this.toggleState = false;
this.setQuery({ key: AUTHOR_PARAM, value: '' });
this.setQuery({ key: NOT_AUTHOR_PARAM, value: '' });
this.applyQuery();
},
},
};
</script>
<template>
<div class="gl-relative gl-pb-0 md:gl-pt-0">
<div class="gl-mb-2 gl-text-sm gl-font-bold" data-testid="author-filter-title">
{{ $options.i18n.author }}
</div>
<!-- Searchable author dropdown; data is fetched lazily on @shown. -->
<filter-dropdown
:list-data="authors"
:error="error"
:header-text="$options.i18n.author"
:search-text="showDropdownPlaceholderText"
:selected-item="selectedAuthorValue"
:icon="showDropdownPlaceholderIcon"
:is-loading="isLoading"
:has-api-search="true"
@search="handleSearch"
@selected="handleSelected"
@shown="getCachedDropdownData"
@reset="handleReset"
/>
<!-- Toggle between "author is" and "author is not" semantics. -->
<gl-form-checkbox
v-model="toggleState"
class="gl-mb-0 gl-inline-flex gl-w-full gl-grow gl-justify-between gl-pt-4"
@input="changeCheckboxInput"
>
<span v-gl-tooltip="$options.i18n.toggleTooltip" data-testid="author-filter-tooltip">
{{ $options.i18n.authorNotIncluded }}
</span>
</gl-form-checkbox>
</div>
</template>

View File

@ -8,6 +8,7 @@ import FiltersTemplate from './filters_template.vue';
import LabelFilter from './label_filter/index.vue';
import ArchivedFilter from './archived_filter/index.vue';
import SourceBranchFilter from './source_branch_filter/index.vue';
import AuthorFilter from './author_filter/index.vue';
export default {
name: 'MergeRequestsFilters',
@ -17,6 +18,7 @@ export default {
LabelFilter,
ArchivedFilter,
SourceBranchFilter,
AuthorFilter,
},
mixins: [glFeatureFlagsMixin()],
computed: {
@ -28,9 +30,12 @@ export default {
(!this.hasMissingProjectContext || this.groupInitialJson?.id)
);
},
shouldShowLabelFilter() {
isAdvancedSearch() {
return this.searchType === SEARCH_TYPE_ADVANCED;
},
shouldShowAuthorFilter() {
return this.isAdvancedSearch && this.glFeatures.searchMrFilterAuthor;
},
},
};
</script>
@ -38,8 +43,9 @@ export default {
<template>
<filters-template>
<status-filter class="gl-mb-5" />
<label-filter v-if="shouldShowLabelFilter" class="gl-mb-5" />
<archived-filter v-if="hasMissingProjectContext" class="gl-mb-5" />
<label-filter v-if="isAdvancedSearch" class="gl-mb-5" />
<source-branch-filter v-if="shouldShowSourceBranchFilter" class="gl-mb-5" />
<author-filter v-if="shouldShowAuthorFilter" class="gl-mb-5" />
</filters-template>
</template>

View File

@ -17,10 +17,10 @@ export default {
type: Array,
required: true,
},
errors: {
type: Array,
error: {
type: String,
required: false,
default: () => [],
default: '',
},
headerText: {
type: String,
@ -57,9 +57,7 @@ export default {
},
data() {
return {
selectedRef: '',
query: '',
hasError: this.errors.length > 0,
};
},
computed: {
@ -83,6 +81,9 @@ export default {
? this.$options.i18n.noSearchResultsText
: this.$options.i18n.noLoadResultsText;
},
hasError() {
return Boolean(this.error);
},
},
created() {
this.debouncedSearch = debounce(this.search, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
@ -104,8 +105,8 @@ export default {
}
this.searchResults = fuzzaldrinPlus.filter(this.listData, this.query, { key: ['text'] });
},
selectRef(ref) {
this.$emit('selected', ref);
selectRef(selectedAuthorValue) {
this.$emit('selected', selectedAuthorValue);
},
onHide() {
if (!this.query || this.searchResults.length > 0) {
@ -149,13 +150,12 @@ export default {
</template>
<template #footer>
<div
v-for="errorMessage in errors"
:key="errorMessage"
data-testid="branch-dropdown-error-list"
v-if="hasError"
data-testid="branch-dropdown-error"
class="gl-mx-4 gl-my-3 gl-flex gl-items-start gl-text-red-500"
>
<gl-icon name="error" class="gl-mr-2 gl-mt-2 gl-shrink-0" />
<span>{{ errorMessage }}</span>
<span class="gl-max-w-full gl-break-all">{{ error }}</span>
</div>
</template>
</gl-collapsible-listbox>

View File

@ -2,6 +2,7 @@
// eslint-disable-next-line no-restricted-imports
import { mapActions, mapState } from 'vuex';
import { GlFormCheckbox, GlTooltipDirective } from '@gitlab/ui';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { s__ } from '~/locale';
import AjaxCache from '~/lib/utils/ajax_cache';
import { mergeUrlParams } from '~/lib/utils/url_utility';
@ -31,7 +32,7 @@ export default {
data() {
return {
sourceBranches: [],
errors: [],
error: '',
toggleState: false,
selectedBranch: '',
isLoading: false,
@ -74,12 +75,13 @@ export default {
this.isLoading = true;
try {
const data = await AjaxCache.retrieve(this.getMergeRequestSourceBranchesEndpoint());
this.errors = [];
this.error = '';
this.isLoading = false;
this.sourceBranches = this.convertToListboxItems(data);
} catch (e) {
} catch (error) {
Sentry.captureException(error);
this.isLoading = false;
this.errors.push(e.message);
this.error = error.message;
}
},
handleSelected(ref) {
@ -122,7 +124,7 @@ export default {
</div>
<filter-dropdown
:list-data="sourceBranches"
:errors="errors"
:error="error"
:header-text="s__('GlobalSearch|Source branch')"
:search-text="showDropdownPlaceholderText"
:selected-item="selectedBranch"

View File

@ -61,6 +61,9 @@ export const EVENT_CLICK_ZOEKT_INCLUDE_FORKS_ON_SEARCH_RESULTS_PAGE =
export const EVENT_SELECT_SOURCE_BRANCH_FILTER_ON_MERGE_REQUEST_PAGE =
'select_source_branch_filter_on_merge_request_page';
export const EVENT_SELECT_AUTHOR_FILTER_ON_MERGE_REQUEST_PAGE =
'event_select_author_filter_on_merge_request_page';
export const LANGUAGE_DEFAULT_ITEM_LENGTH = 10;
export const LANGUAGE_MAX_ITEM_LENGTH = 100;

View File

@ -8,6 +8,8 @@ import {
SOURCE_BRANCH_PARAM,
NOT_SOURCE_BRANCH_PARAM,
STATE_FILTER_PARAM,
AUTHOR_PARAM,
NOT_AUTHOR_PARAM,
} from '~/search/sidebar/constants';
export const MAX_FREQUENT_ITEMS = 5;
@ -27,6 +29,8 @@ export const SIDEBAR_PARAMS = [
INCLUDE_FORKED_FILTER_PARAM,
SOURCE_BRANCH_PARAM,
NOT_SOURCE_BRANCH_PARAM,
AUTHOR_PARAM,
NOT_AUTHOR_PARAM,
];
export const REGEX_PARAM = 'regex';

View File

@ -236,6 +236,12 @@ module Types
description: 'IDE settings.',
method: :itself
field :type,
type: Types::Users::TypeEnum,
null: false,
description: 'Type of the user.',
method: :user_type
definition_methods do
def resolve_type(object, context)
# in the absence of other information, we cannot tell - just default to

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
module Types
  module Users
    # GraphQL enum exposing the possible values of `User#user_type`
    # (e.g. HUMAN, PROJECT_BOT), generated from the `User.user_types`
    # enum mapping so it stays in sync with the model.
    class TypeEnum < BaseEnum
      graphql_name 'UserType'
      description 'Possible types of user'

      User.user_types.each_key do |key|
        # `String#humanize` already returns a String, so the original's
        # trailing `.to_s` on the description was redundant and is dropped.
        value key.to_s.upcase, value: key.to_s, description: key.to_s.humanize
      end
    end
  end
end

View File

@ -445,6 +445,7 @@ module Ci
end
scope :for_status, ->(status) { where(status: status) }
scope :created_after, ->(time) { where(arel_table[:created_at].gt(time)) }
scope :created_before, ->(time) { where(arel_table[:created_at].lt(time)) }
scope :created_before_id, ->(id) { where(arel_table[:id].lt(id)) }
scope :before_pipeline, ->(pipeline) { created_before_id(pipeline.id).outside_pipeline_family(pipeline) }
scope :with_pipeline_source, ->(source) { where(source: source) }

View File

@ -13,7 +13,6 @@ module Ci
include TaggableQueries
include Presentable
include EachBatch
include Ci::HasRunnerExecutor
include Ci::HasRunnerStatus
include Ci::Taggable
@ -88,6 +87,7 @@ module Ci
has_many :projects, through: :runner_projects, disable_joins: true
has_many :runner_namespaces, inverse_of: :runner, autosave: true
has_many :groups, through: :runner_namespaces, disable_joins: true
has_many :tag_links, class_name: 'Ci::RunnerTagging', inverse_of: :runner
# currently we have only 1 namespace assigned, but order is here for consistency
has_one :owner_runner_namespace, -> { order(:id) }, class_name: 'Ci::RunnerNamespace'
@ -330,6 +330,14 @@ module Ci
end
end
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/504277 is closed.
# Memoized anonymous subclass of this model pointed at the partitioned
# `ci_runners_e59bb2812d` table. Used to mirror writes into the partitioned
# table while its backfill is still in progress.
def self.sharded_table_proxy_model
@sharded_table_proxy_class ||= Class.new(self) do
self.table_name = :ci_runners_e59bb2812d
self.primary_key = :id
end
end
def runner_matcher
Gitlab::Ci::Matching::RunnerMatcher.new({
runner_ids: [id],
@ -438,6 +446,44 @@ module Ci
tag_list.any?
end
override :save_tags
# Dual-writes runner tags into the partitioned `ci_runner_taggings` table
# (via the `tag_links` association) in addition to the legacy taggings
# handled by `super`, gated by the :write_to_ci_runner_taggings flag.
def save_tags
super do |new_tags, old_tags|
# Skip the partitioned-table write entirely while the flag is disabled.
next if ::Feature.disabled?(:write_to_ci_runner_taggings, owner)
# Remove partitioned taggings for tags that were dropped from the runner.
if old_tags.present?
tag_links
.where(tag_id: old_tags)
.delete_all
end
# Avoid inserting partitioned taggings that refer to a missing ci_runners partitioned record, since
# the backfill is not yet finalized.
ensure_partitioned_runner_record_exists if new_tags.any?
ci_runner_taggings = new_tags.map do |tag|
Ci::RunnerTagging.new(
runner_id: id, runner_type: runner_type,
tag_id: tag.id, sharding_key_id: sharding_key_id)
end
# validate: false skips per-record AR validations on the bulk insert;
# unique_by presumably makes re-inserting an existing
# (tag_id, runner_id, runner_type) combination a no-op — confirm against
# BulkInsertSafe semantics.
::Ci::RunnerTagging.bulk_insert!(
ci_runner_taggings,
validate: false,
unique_by: [:tag_id, :runner_id, :runner_type],
returns: :id
)
end
end
# TODO: Remove once https://gitlab.com/gitlab-org/gitlab/-/issues/504277 is closed.
# Upserts this runner's row into the partitioned runners table so that
# partitioned taggings never reference a missing runner record while the
# backfill is unfinished. `unique_by: [:id, :runner_type]` turns the insert
# into a no-op when the partitioned row already exists.
def ensure_partitioned_runner_record_exists
self.class.sharded_table_proxy_model.insert_all(
[attributes.except('tag_list')], unique_by: [:id, :runner_type],
returning: false, record_timestamps: false
)
end
def predefined_variables
Gitlab::Ci::Variables::Collection.new
.append(key: 'CI_RUNNER_ID', value: id.to_s)

View File

@ -2,6 +2,8 @@
module Ci
class RunnerTagging < Ci::ApplicationRecord
include BulkInsertSafe
self.table_name = :ci_runner_taggings
self.primary_key = :id

View File

@ -50,8 +50,15 @@ class ProjectCiCdSetting < ApplicationRecord
chronic_duration_attr :runner_token_expiration_interval_human_readable, :runner_token_expiration_interval
chronic_duration_attr_writer :delete_pipelines_in_human_readable, :delete_pipelines_in_seconds
scope :configured_to_delete_old_pipelines, -> { where.not(delete_pipelines_in_seconds: nil) }
scope :with_project, -> { preload(:project) }
scope :for_project, ->(ids) { where(project_id: ids) }
scope :order_project_id_asc, -> { order(project_id: :asc) }
scope :configured_to_delete_old_pipelines, -> do
where.not(delete_pipelines_in_seconds: nil)
end
# Returns up to `limit` project ids from the current relation.
# NOTE(review): callers compute `limit` dynamically — confirm a
# non-positive value is never passed, since `limit(0)` yields no rows.
def self.pluck_project_id(limit)
limit(limit).pluck(:project_id)
end
def keep_latest_artifacts_available?
# The project level feature can only be enabled when the feature is enabled instance wide

View File

@ -82,8 +82,6 @@ module Users
end
def exceeded_daily_verification_limit?
return false unless Feature.enabled?(:credit_card_validation_daily_limit, user, type: :gitlab_com_derisk)
duplicate_record_count = self.class
.where(stripe_card_fingerprint: stripe_card_fingerprint)
.where('credit_card_validated_at > ?', 24.hours.ago)

View File

@ -336,6 +336,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:ci_schedule_old_pipelines_removal_cron
:worker_name: Ci::ScheduleOldPipelinesRemovalCronWorker
:feature_category: :continuous_integration
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:ci_schedule_unlock_pipelines_in_queue_cron
:worker_name: Ci::ScheduleUnlockPipelinesInQueueCronWorker
:feature_category: :job_artifacts
@ -2901,6 +2910,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: ci_destroy_old_pipelines
:worker_name: Ci::DestroyOldPipelinesWorker
:feature_category: :continuous_integration
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: ci_initialize_pipelines_iid_sequence
:worker_name: Ci::InitializePipelinesIidSequenceWorker
:feature_category: :continuous_integration

View File

@ -0,0 +1,48 @@
# frozen_string_literal: true
module Ci
  # LimitedCapacity worker that pops one project id per invocation from the
  # Redis queue populated by Ci::ScheduleOldPipelinesRemovalCronWorker and
  # destroys up to LIMIT of that project's pipelines created before the
  # project's configured retention window (`ci_delete_pipelines_in_seconds`).
  class DestroyOldPipelinesWorker
    include ApplicationWorker
    include LimitedCapacity::Worker

    data_consistency :sticky
    feature_category :continuous_integration
    urgency :low
    idempotent!

    # Maximum pipelines destroyed per invocation for a single project.
    LIMIT = 250
    # Maximum number of concurrently running jobs (LimitedCapacity).
    CONCURRENCY = 10

    def perform_work(*)
      # `find_by_id` returns nil when the queue is empty or the project was
      # deleted; `try` then makes the whole invocation a no-op.
      Project.find_by_id(fetch_next_project_id).try do |project|
        with_context(project: project) do
          timestamp = project.ci_delete_pipelines_in_seconds.seconds.ago
          pipelines = Ci::Pipeline.for_project(project.id).created_before(timestamp).limit(LIMIT).to_a
          pipelines.each { |pipeline| Ci::DestroyPipelineService.new(project, nil).unsafe_execute(pipeline) }
        end
      end
    end

    def max_running_jobs
      CONCURRENCY
    end

    # Number of project ids still queued. The queue is a Redis list
    # (RPUSH/LPOP), so LLEN is the correct length command — the original
    # called SCARD, a set command that raises WRONGTYPE on a list key.
    def remaining_work_count(*)
      Gitlab::Redis::SharedState.with do |redis|
        redis.llen(queue_key)
      end
    end

    private

    def fetch_next_project_id
      Gitlab::Redis::SharedState.with do |redis|
        redis.lpop(queue_key)
      end
    end

    def queue_key
      Ci::ScheduleOldPipelinesRemovalCronWorker::QUEUE_KEY
    end
  end
end

View File

@ -0,0 +1,73 @@
# frozen_string_literal: true
module Ci
  # Cron worker that incrementally scans ProjectCiCdSetting rows configured
  # with an old-pipeline retention window, pushes the matching project ids
  # onto a Redis list, and kicks off Ci::DestroyOldPipelinesWorker to drain
  # that queue. A Redis cursor lets consecutive runs resume the table scan.
  class ScheduleOldPipelinesRemovalCronWorker
    include ApplicationWorker
    include CronjobQueue # rubocop:disable Scalability/CronWorkerContext -- does not perform work scoped to a context

    urgency :low
    idempotent!
    deduplicate :until_executed, including_scheduled: true
    feature_category :continuous_integration
    data_consistency :sticky

    # Upper bound on queued project ids at any one time.
    PROJECTS_LIMIT = 1_000
    # Cursor: last project id queued. The `{}` hash-tag keeps the related
    # keys on the same Redis Cluster slot.
    LAST_PROCESSED_REDIS_KEY = 'ci_old_pipelines_removal_last_processed_project_id{}'
    REDIS_EXPIRATION_TIME = 2.hours.to_i
    # Redis list (RPUSH here, LPOP in DestroyOldPipelinesWorker).
    QUEUE_KEY = 'ci_old_pipelines_removal_project_ids_queue{}'

    def perform
      return if Feature.disabled?(:ci_delete_old_pipelines, :instance, type: :beta)

      # NOTE(review): when the queue already holds PROJECTS_LIMIT or more
      # entries, `limit` is non-positive and flows into the query below —
      # confirm that case is intended to fetch nothing.
      limit = PROJECTS_LIMIT - queued_entries_count
      project_ids = fetch_next_project_ids(limit)
      queue_projects_for_processing(project_ids)
      # A short page means the scan reached the end of the table: drop the
      # cursor so the next run starts over from the lowest project id.
      remove_last_processed_id if project_ids.empty? || project_ids.size < limit

      Ci::DestroyOldPipelinesWorker.perform_with_capacity
    end

    private

    def fetch_next_project_ids(limit)
      ProjectCiCdSetting
        .configured_to_delete_old_pipelines
        .for_project(last_processed_id..)
        .order_project_id_asc
        .pluck_project_id(limit)
    end

    # The queue is a Redis list, so LLEN is the correct length command —
    # the original called SCARD, a set command that raises WRONGTYPE when
    # applied to a list key.
    def queued_entries_count
      with_redis do |redis|
        redis.llen(QUEUE_KEY).to_i
      end
    end

    def queue_projects_for_processing(ids)
      return if ids.empty?

      with_redis do |redis|
        redis.pipelined do |pipeline|
          pipeline.rpush(QUEUE_KEY, ids)
          pipeline.set(LAST_PROCESSED_REDIS_KEY, ids.last, ex: REDIS_EXPIRATION_TIME)
        end
      end
    end

    # Returns 0 when the cursor key is absent (`nil.to_i`), i.e. the scan
    # starts from the beginning.
    def last_processed_id
      with_redis do |redis|
        redis.get(LAST_PROCESSED_REDIS_KEY).to_i
      end
    end

    def remove_last_processed_id
      with_redis do |redis|
        redis.del(LAST_PROCESSED_REDIS_KEY)
      end
    end

    def with_redis(&)
      Gitlab::Redis::SharedState.with(&) # rubocop:disable CodeReuse/ActiveRecord -- not AR
    end
  end
end

View File

@ -1,9 +0,0 @@
---
name: credit_card_validation_daily_limit
feature_issue_url: https://gitlab.com/gitlab-org/modelops/anti-abuse/team-tasks/-/issues/742
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/159151
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/472122
milestone: '17.3'
group: group::anti-abuse
type: gitlab_com_derisk
default_enabled: false

View File

@ -0,0 +1,9 @@
---
name: write_to_ci_runner_taggings
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/472974
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/173007
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/505003
milestone: '17.7'
group: group::runner
type: gitlab_com_derisk
default_enabled: false

View File

@ -4,5 +4,5 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/158786
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/472815
milestone: '17.3'
type: wip
group: group::observability
group: group::platform insights
default_enabled: false

View File

@ -3,6 +3,10 @@ abuse_report_assignees:
- table: users
column: user_id
on_delete: async_delete
ai_conversation_messages:
- table: ai_conversation_threads
column: thread_id
on_delete: async_delete
application_settings:
- table: push_rules
column: push_rule_id

View File

@ -736,6 +736,9 @@ Settings.cron_jobs['database_monitor_locked_tables_cron_worker']['job_class'] =
Settings.cron_jobs['merge_requests_process_scheduled_merge'] ||= {}
Settings.cron_jobs['merge_requests_process_scheduled_merge']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['merge_requests_process_scheduled_merge']['job_class'] = 'MergeRequests::ProcessScheduledMergeWorker'
Settings.cron_jobs['ci_schedule_old_pipelines_removal_cron_worker'] ||= {}
Settings.cron_jobs['ci_schedule_old_pipelines_removal_cron_worker']['cron'] ||= '*/11 * * * *'
Settings.cron_jobs['ci_schedule_old_pipelines_removal_cron_worker']['job_class'] = 'Ci::ScheduleOldPipelinesRemovalCronWorker'
Gitlab.ee do
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= {}

View File

@ -179,6 +179,8 @@
- 1
- - ci_delete_objects
- 1
- - ci_destroy_old_pipelines
- 1
- - ci_initialize_pipelines_iid_sequence
- 1
- - ci_job_artifacts_expire_project_build_artifacts

View File

@ -0,0 +1,26 @@
- title: "Gitaly rate limiting"
removal_milestone: "18.0"
announcement_milestone: "17.7"
breaking_change: false
reporter: qmnguyen0711
stage: systems
issue_url: https://gitlab.com/gitlab-org/gitaly/-/issues/5011
impact: low
scope: instance
resolution_role: Admin
manual_task: false
body: |
Because of the highly variable nature of Git operations and repository latencies, Gitaly
[RPC-based rate limiting](https://docs.gitlab.com/ee/administration/gitaly/monitoring.html#monitor-gitaly-rate-limiting)
is ineffective. Configuring proper rate limits is challenging and often becomes obsolete quickly because harmful
actions rarely generate enough requests per second to stand out.
Gitaly already supports [concurrency limiting](https://docs.gitlab.com/ee/administration/gitaly/concurrency_limiting.html) and an
[adaptive limiting add-on](https://docs.gitlab.com/ee/administration/gitaly/concurrency_limiting.html#adaptive-concurrency-limiting),
which have proven to work well in production.
Because Gitaly is not directly exposed to external networks, and external protection layers such as load balancers
provide better safeguards, rate limiting is less effective.
Therefore, we're deprecating rate limiting in favor of the more reliable concurrency limiting. Gitaly RPC-based
rate limiting will be removed in GitLab 18.0.

View File

@ -0,0 +1,13 @@
---
table_name: ai_conversation_messages
classes:
- Ai::Conversation::Message
feature_categories:
- ai_abstraction_layer
- duo_chat
description: Messages for GitLab Duo features.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/171934
milestone: '17.7'
gitlab_schema: gitlab_main_cell
sharding_key:
organization_id: organizations

View File

@ -0,0 +1,13 @@
---
table_name: ai_conversation_threads
classes:
- Ai::Conversation::Thread
feature_categories:
- ai_abstraction_layer
- duo_chat
description: Threads of messages for GitLab Duo features.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/171934
milestone: '17.7'
gitlab_schema: gitlab_main_cell
sharding_key:
organization_id: organizations

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillProtectedBranchMergeAccessLevelsProtectedBranchNamespaceId
description: Backfills sharding key `protected_branch_merge_access_levels.protected_branch_namespace_id` from `protected_branches`.
feature_category: source_code_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/174564
milestone: '17.7'
queued_migration_version: 20241204130230
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillProtectedBranchMergeAccessLevelsProtectedBranchProjectId
description: Backfills sharding key `protected_branch_merge_access_levels.protected_branch_project_id` from `protected_branches`.
feature_category: source_code_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/174564
milestone: '17.7'
queued_migration_version: 20241204130225
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillStatusPagePublishedIncidentsNamespaceId
description: Backfills sharding key `status_page_published_incidents.namespace_id` from `issues`.
feature_category: incident_management
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/174868
milestone: '17.7'
queued_migration_version: 20241205143060
finalized_by: # version of the migration that finalized this BBM

View File

@ -26,3 +26,6 @@ desired_sharding_key:
sharding_key: namespace_id
belongs_to: protected_branch
table_size: small
desired_sharding_key_migration_job_name:
- BackfillProtectedBranchMergeAccessLevelsProtectedBranchProjectId
- BackfillProtectedBranchMergeAccessLevelsProtectedBranchNamespaceId

View File

@ -18,3 +18,4 @@ desired_sharding_key:
sharding_key: namespace_id
belongs_to: issue
table_size: small
desired_sharding_key_migration_job_name: BackfillStatusPagePublishedIncidentsNamespaceId

View File

@ -0,0 +1,39 @@
# frozen_string_literal: true
# Creates the `ai_conversation_threads` and `ai_conversation_messages`
# tables that back GitLab Duo conversation storage. Foreign keys are added
# in a separate migration (post-create, per migration guidelines).
class CreateAiConversationThreadsAndMessages < Gitlab::Database::Migration[2.2]
milestone '17.7'
def change
create_table :ai_conversation_threads do |t| # rubocop:disable Migration/EnsureFactoryForTable, Lint/RedundantCopDisableDirective -- https://gitlab.com/gitlab-org/gitlab/-/issues/468630
t.bigint :user_id, null: false
t.bigint :organization_id, null: false
t.datetime_with_timezone :last_updated_at, null: false, default: -> { 'NOW()' }
t.timestamps_with_timezone null: false
# smallint enum column; values defined on the model.
t.integer :conversation_type, limit: 2, null: false
t.index :last_updated_at
t.index :organization_id
# Supports listing a user's threads ordered by recency.
t.index [:user_id, :last_updated_at]
end
create_table :ai_conversation_messages do |t| # rubocop:disable Migration/EnsureFactoryForTable, Lint/RedundantCopDisableDirective -- https://gitlab.com/gitlab-org/gitlab/-/issues/468630
t.bigint :thread_id, null: false
t.bigint :agent_version_id, null: true
t.bigint :organization_id, null: false
t.timestamps_with_timezone null: false
# smallint enum column; presumably user/assistant role — confirm on model.
t.integer :role, limit: 2, null: false
t.boolean :has_feedback, default: false
t.jsonb :extras, default: {}, null: false
t.jsonb :error_details, default: {}, null: false
t.text :content, null: false, limit: 512.kilobytes
t.text :request_xid, limit: 255
t.text :message_xid, limit: 255
t.text :referer_url, limit: 255
# Supports fetching a thread's messages in chronological order.
t.index [:thread_id, :created_at]
t.index :message_xid
t.index :organization_id
t.index :agent_version_id
end
end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
# Adds foreign keys for the new AI conversation tables:
# threads -> organizations/users (CASCADE on delete) and
# messages -> ai_agent_versions (SET NULL) / organizations (CASCADE).
# FKs are added concurrently, so DDL transactions are disabled.
class AddFkToAiConversationThreadsAndMessages < Gitlab::Database::Migration[2.2]
  milestone '17.7'

  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :ai_conversation_threads, :organizations, column: :organization_id, on_delete: :cascade
    add_concurrent_foreign_key :ai_conversation_threads, :users, column: :user_id, on_delete: :cascade
    # Deleting an agent version keeps the message but clears the reference.
    add_concurrent_foreign_key :ai_conversation_messages, :ai_agent_versions, column: :agent_version_id,
      on_delete: :nullify
    add_concurrent_foreign_key :ai_conversation_messages, :organizations, column: :organization_id,
      on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :ai_conversation_threads, column: :organization_id
      remove_foreign_key :ai_conversation_threads, column: :user_id
      remove_foreign_key :ai_conversation_messages, column: :agent_version_id
      remove_foreign_key :ai_conversation_messages, column: :organization_id
    end
  end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# NOTE(review): "Thred" in the class name is a typo for "Thread". A Rails
# migration class name must match its file name, and merged migrations must
# not be edited after the fact, so the typo can only be fixed with a follow-up
# rename of both the file and the class before this ships — flagging, not fixing.
#
# Registers ai_conversation_threads with loose-foreign-key tracking: deletions
# are recorded (via a database trigger) so dependent rows can be cleaned up
# asynchronously instead of through synchronous FK cascades.
class TrackAiConversationThredRecordChanges < Gitlab::Database::Migration[2.2]
  include Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers

  milestone '17.7'

  def up
    track_record_deletions(:ai_conversation_threads)
  end

  def down
    untrack_record_deletions(:ai_conversation_threads)
  end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds the protected_branch_project_id column (a denormalized sharding key
# copied from the parent protected_branches row) to
# protected_branch_merge_access_levels. Nullable for now; it is populated by
# a sync trigger and a batched background backfill in follow-up migrations.
class AddProtectedBranchProjectIdToProtectedBranchMergeAccessLevels < Gitlab::Database::Migration[2.2]
  milestone '17.7'

  def change
    add_column :protected_branch_merge_access_levels, :protected_branch_project_id, :bigint
  end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds the protected_branch_namespace_id column (a denormalized sharding key
# copied from the parent protected_branches row) to
# protected_branch_merge_access_levels. Nullable for now; it is populated by
# a sync trigger and a batched background backfill in follow-up migrations.
class AddProtectedBranchNamespaceIdToProtectedBranchMergeAccessLevels < Gitlab::Database::Migration[2.2]
  milestone '17.7'

  def change
    add_column :protected_branch_merge_access_levels, :protected_branch_namespace_id, :bigint
  end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds the namespace_id column (a denormalized sharding key copied from the
# parent issues row) to status_page_published_incidents. Nullable for now;
# it is populated by a sync trigger and a batched background backfill in
# follow-up migrations.
class AddNamespaceIdToStatusPagePublishedIncidents < Gitlab::Database::Migration[2.2]
  milestone '17.7'

  def change
    add_column :status_page_published_incidents, :namespace_id, :bigint
  end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# Adds an index on protected_branch_merge_access_levels.protected_branch_project_id
# to support lookups/joins by the new sharding key and the FK added next.
class IdxProtectedBranchMergeAccessLevelsOnProtectedBranchProjectId < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  disable_ddl_transaction!

  # Name appears truncated ("...proje") — presumably to fit PostgreSQL's
  # 63-character identifier limit; confirm it stays unique among indexes.
  INDEX_NAME = 'idx_protected_branch_merge_access_levels_protected_branch_proje'

  def up
    add_concurrent_index :protected_branch_merge_access_levels, :protected_branch_project_id, name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :protected_branch_merge_access_levels, INDEX_NAME
  end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# protected_branch_merge_access_levels.protected_branch_project_id to
# projects(id), added concurrently to avoid long exclusive locks.
class AddProtectedBranchMergeAccessLevelsProtectedBranchProjectIdFk < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :protected_branch_merge_access_levels, :projects, column: :protected_branch_project_id,
      on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :protected_branch_merge_access_levels, column: :protected_branch_project_id
    end
  end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Installs a database trigger that, on INSERT/UPDATE of
# protected_branch_merge_access_levels rows with a NULL
# protected_branch_project_id, copies project_id from the parent
# protected_branches row (joined via protected_branch_id). Keeps the sharding
# key populated for new writes while the backfill handles existing rows.
class AddProtectedBranchMergeAccessLevelsProtectedBranchProjectIdTrigger < Gitlab::Database::Migration[2.2]
  milestone '17.7'

  def up
    install_sharding_key_assignment_trigger(
      table: :protected_branch_merge_access_levels,
      sharding_key: :protected_branch_project_id,
      parent_table: :protected_branches,
      parent_sharding_key: :project_id,
      foreign_key: :protected_branch_id
    )
  end

  def down
    # Arguments must mirror `up` so the generated trigger name matches.
    remove_sharding_key_assignment_trigger(
      table: :protected_branch_merge_access_levels,
      sharding_key: :protected_branch_project_id,
      parent_table: :protected_branches,
      parent_sharding_key: :project_id,
      foreign_key: :protected_branch_id
    )
  end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
# Queues the batched background migration that backfills
# protected_branch_merge_access_levels.protected_branch_project_id from the
# parent protected_branches.project_id (joined via protected_branch_id) for
# pre-existing rows; the sync trigger covers new writes.
class QueueBackfillProtectedBranchMergeAccessLevelsProjectId < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  MIGRATION = "BackfillProtectedBranchMergeAccessLevelsProtectedBranchProjectId"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    queue_batched_background_migration(
      MIGRATION,
      :protected_branch_merge_access_levels,
      :id,
      :protected_branch_project_id,
      :protected_branches,
      :project_id,
      :protected_branch_id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    # The job-argument array must match the extra arguments passed to `up`
    # for the queued migration record to be found and deleted.
    delete_batched_background_migration(
      MIGRATION,
      :protected_branch_merge_access_levels,
      :id,
      [
        :protected_branch_project_id,
        :protected_branches,
        :project_id,
        :protected_branch_id
      ]
    )
  end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# Adds an index on protected_branch_merge_access_levels.protected_branch_namespace_id
# to support lookups/joins by the new sharding key and the FK added next.
class IdxProtectedBranchMergeAccessLevelsOnProtectedBranchNamespaceId < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  disable_ddl_transaction!

  # Name appears truncated ("...names") — presumably to fit PostgreSQL's
  # 63-character identifier limit; confirm it stays unique among indexes.
  INDEX_NAME = 'idx_protected_branch_merge_access_levels_protected_branch_names'

  def up
    add_concurrent_index :protected_branch_merge_access_levels, :protected_branch_namespace_id, name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :protected_branch_merge_access_levels, INDEX_NAME
  end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Adds a cascading foreign key from
# protected_branch_merge_access_levels.protected_branch_namespace_id to
# namespaces(id), added concurrently to avoid long exclusive locks.
class AddProtectedBranchMergeAccessLevelsProtectedBranchNamespaceIdFk < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :protected_branch_merge_access_levels, :namespaces,
      column: :protected_branch_namespace_id, on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :protected_branch_merge_access_levels, column: :protected_branch_namespace_id
    end
  end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Installs a database trigger that, on INSERT/UPDATE of
# protected_branch_merge_access_levels rows with a NULL
# protected_branch_namespace_id, copies namespace_id from the parent
# protected_branches row (joined via protected_branch_id). Keeps the sharding
# key populated for new writes while the backfill handles existing rows.
class AddProtectedBranchMergeAccessLevelsProtectedBranchNamespaceIdTrigger < Gitlab::Database::Migration[2.2]
  milestone '17.7'

  def up
    install_sharding_key_assignment_trigger(
      table: :protected_branch_merge_access_levels,
      sharding_key: :protected_branch_namespace_id,
      parent_table: :protected_branches,
      parent_sharding_key: :namespace_id,
      foreign_key: :protected_branch_id
    )
  end

  def down
    # Arguments must mirror `up` so the generated trigger name matches.
    remove_sharding_key_assignment_trigger(
      table: :protected_branch_merge_access_levels,
      sharding_key: :protected_branch_namespace_id,
      parent_table: :protected_branches,
      parent_sharding_key: :namespace_id,
      foreign_key: :protected_branch_id
    )
  end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
# Queues the batched background migration that backfills
# protected_branch_merge_access_levels.protected_branch_namespace_id from the
# parent protected_branches.namespace_id (joined via protected_branch_id) for
# pre-existing rows; the sync trigger covers new writes.
class QueueBackfillProtectedBranchMergeAccessLevelsNamespaceId < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  MIGRATION = "BackfillProtectedBranchMergeAccessLevelsProtectedBranchNamespaceId"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    queue_batched_background_migration(
      MIGRATION,
      :protected_branch_merge_access_levels,
      :id,
      :protected_branch_namespace_id,
      :protected_branches,
      :namespace_id,
      :protected_branch_id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    # The job-argument array must match the extra arguments passed to `up`
    # for the queued migration record to be found and deleted.
    delete_batched_background_migration(
      MIGRATION,
      :protected_branch_merge_access_levels,
      :id,
      [
        :protected_branch_namespace_id,
        :protected_branches,
        :namespace_id,
        :protected_branch_id
      ]
    )
  end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
# Replaces the partial index on project_ci_cd_settings(id) with an equivalent
# partial index on project_id (both filtered to rows where
# delete_pipelines_in_seconds IS NOT NULL), so the pipeline-removal lookup
# can be driven by project_id instead of id.
class IndexProjectCiCdSettingsForPipelineRemoval < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!
  milestone '17.7'

  OLD_INDEX = 'index_project_ci_cd_settings_on_id_partial'
  INDEX = 'index_project_ci_cd_settings_on_project_id_partial'

  def up
    remove_concurrent_index :project_ci_cd_settings, :id, name: OLD_INDEX
    add_concurrent_index :project_ci_cd_settings, :project_id,
      where: 'delete_pipelines_in_seconds IS NOT NULL', name: INDEX
  end

  def down
    # Restores the old index before dropping the new one (reverse of `up`).
    add_concurrent_index :project_ci_cd_settings, :id,
      where: 'delete_pipelines_in_seconds IS NOT NULL', name: OLD_INDEX
    remove_concurrent_index :project_ci_cd_settings, :project_id, name: INDEX
  end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# Adds an index on status_page_published_incidents.namespace_id to support
# lookups/joins by the new sharding key and the FK added next.
class IndexStatusPagePublishedIncidentsOnNamespaceId < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  disable_ddl_transaction!

  INDEX_NAME = 'index_status_page_published_incidents_on_namespace_id'

  def up
    add_concurrent_index :status_page_published_incidents, :namespace_id, name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :status_page_published_incidents, INDEX_NAME
  end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
# Adds a cascading foreign key from status_page_published_incidents.namespace_id
# to namespaces(id), added concurrently to avoid long exclusive locks.
class AddStatusPagePublishedIncidentsNamespaceIdFk < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :status_page_published_incidents, :namespaces, column: :namespace_id, on_delete: :cascade
  end

  def down
    with_lock_retries do
      remove_foreign_key :status_page_published_incidents, column: :namespace_id
    end
  end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Installs a database trigger that, on INSERT/UPDATE of
# status_page_published_incidents rows with a NULL namespace_id, copies
# namespace_id from the parent issues row (joined via issue_id). Keeps the
# sharding key populated for new writes while the backfill handles existing rows.
class AddStatusPagePublishedIncidentsNamespaceIdTrigger < Gitlab::Database::Migration[2.2]
  milestone '17.7'

  def up
    install_sharding_key_assignment_trigger(
      table: :status_page_published_incidents,
      sharding_key: :namespace_id,
      parent_table: :issues,
      parent_sharding_key: :namespace_id,
      foreign_key: :issue_id
    )
  end

  def down
    # Arguments must mirror `up` so the generated trigger name matches.
    remove_sharding_key_assignment_trigger(
      table: :status_page_published_incidents,
      sharding_key: :namespace_id,
      parent_table: :issues,
      parent_sharding_key: :namespace_id,
      foreign_key: :issue_id
    )
  end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
# Queues the batched background migration that backfills
# status_page_published_incidents.namespace_id from the parent
# issues.namespace_id (joined via issue_id) for pre-existing rows; the sync
# trigger covers new writes.
class QueueBackfillStatusPagePublishedIncidentsNamespaceId < Gitlab::Database::Migration[2.2]
  milestone '17.7'
  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  MIGRATION = "BackfillStatusPagePublishedIncidentsNamespaceId"
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 1000
  SUB_BATCH_SIZE = 100

  def up
    queue_batched_background_migration(
      MIGRATION,
      :status_page_published_incidents,
      :id,
      :namespace_id,
      :issues,
      :namespace_id,
      :issue_id,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    # The job-argument array must match the extra arguments passed to `up`
    # for the queued migration record to be found and deleted.
    delete_batched_background_migration(
      MIGRATION,
      :status_page_published_incidents,
      :id,
      [
        :namespace_id,
        :issues,
        :namespace_id,
        :issue_id
      ]
    )
  end
end

View File

@ -0,0 +1 @@
ab51cce35a6020a01ece32b9f7fcb9514f0298c206501669d2bff0a76247920a

View File

@ -0,0 +1 @@
3bcd882c894b0f9c60b07c4111616f8a94d2ab80cf840b2470f2d7565e9260c4

View File

@ -0,0 +1 @@
c52313e55db4a50a5f3a6e8668831ffd971be7483b3cef6e8d497a500d27035d

View File

@ -0,0 +1 @@
1e016cdcb87f3794422eba3e63f9eed9ed31cadd81b60c26ae12abc5873b9559

View File

@ -0,0 +1 @@
e644a608d47ef97e099df831390df81a62f65618aa8912bf811e192d723e57f9

View File

@ -0,0 +1 @@
b4837be8ab67b7c0f95c9e36aab31956fa10145bf94c30e88b44caa3c1dfbda8

View File

@ -0,0 +1 @@
5d9bf71d27193ebb27d098439581721a3b84e454dee0d1be3d47b1d3bec03694

View File

@ -0,0 +1 @@
f19df9f4978799b7eefaa9a5eb16d5add079ae7d6bfb34b808307cfcf06811d0

View File

@ -0,0 +1 @@
d65257f8c1c805654fb1da7f6cfe54504c3c98e5ae0b24c18cd11749eb8ea1d4

View File

@ -0,0 +1 @@
2d00490a000507a683dcd7f43de9440a2f1800996aeaa32955d1a8df2262da15

View File

@ -0,0 +1 @@
feeb2affc1884ea5eabb904f6788da6c6407233868b9bf13f9cb870d4ab33a61

View File

@ -0,0 +1 @@
b6826a4f6f4383b28d1990275ae8243425efac585c4bc0883ab0477d5068e8cc

View File

@ -0,0 +1 @@
f2301d9ebd4aa0e8f18f32b9c467031fd9872e58a5868bb613b883d2019e670a

View File

@ -0,0 +1 @@
d1e67db49bbfb3c65aa8d0bff2de46bd78dc3057e2338344c33f684ddfb6a06c

View File

@ -0,0 +1 @@
be2cd9f5c5cf4fa9ba941735ace989c6b4a16b50150ec6e5fac4125653a947ce

View File

@ -0,0 +1 @@
d900f3896f99f8bf65c776cc5c6d4d20cb4bf9e20ab5651df4fc5b9f8fccc843

View File

@ -0,0 +1 @@
62778787a1e3bb41291887c93a8aec7fd60bfadb808ad2d1b4283fb5ab227fca

View File

@ -0,0 +1 @@
88e34456142cf48b55d79a59c88934387eb98d7ffa97a0450656dc54ed3dcd1b

View File

@ -0,0 +1 @@
fe926f2a168130492492e670268c9e32ce860150db85ceffae37f4b0f58ea9f6

View File

@ -978,6 +978,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_05cc4448a8aa() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."protected_branch_namespace_id" IS NULL THEN
SELECT "namespace_id"
INTO NEW."protected_branch_namespace_id"
FROM "protected_branches"
WHERE "protected_branches"."id" = NEW."protected_branch_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_05ce163deddf() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -1026,6 +1042,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_0aea02e5a699() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."protected_branch_project_id" IS NULL THEN
SELECT "project_id"
INTO NEW."protected_branch_project_id"
FROM "protected_branches"
WHERE "protected_branches"."id" = NEW."protected_branch_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_0c326daf67cf() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -2865,6 +2897,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_dbe374a57cbb() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."namespace_id" IS NULL THEN
SELECT "namespace_id"
INTO NEW."namespace_id"
FROM "issues"
WHERE "issues"."id" = NEW."issue_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_dc13168b8025() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -6103,6 +6151,55 @@ CREATE SEQUENCE ai_code_suggestion_events_id_seq
ALTER SEQUENCE ai_code_suggestion_events_id_seq OWNED BY ai_code_suggestion_events.id;
CREATE TABLE ai_conversation_messages (
id bigint NOT NULL,
thread_id bigint NOT NULL,
agent_version_id bigint,
organization_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
role smallint NOT NULL,
has_feedback boolean DEFAULT false,
extras jsonb DEFAULT '{}'::jsonb NOT NULL,
error_details jsonb DEFAULT '{}'::jsonb NOT NULL,
content text NOT NULL,
request_xid text,
message_xid text,
referer_url text,
CONSTRAINT check_0fe78937e4 CHECK ((char_length(content) <= 524288)),
CONSTRAINT check_8daec62ec9 CHECK ((char_length(request_xid) <= 255)),
CONSTRAINT check_b14b137e02 CHECK ((char_length(message_xid) <= 255)),
CONSTRAINT check_f36c73d1d9 CHECK ((char_length(referer_url) <= 255))
);
CREATE SEQUENCE ai_conversation_messages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE ai_conversation_messages_id_seq OWNED BY ai_conversation_messages.id;
CREATE TABLE ai_conversation_threads (
id bigint NOT NULL,
user_id bigint NOT NULL,
organization_id bigint NOT NULL,
last_updated_at timestamp with time zone DEFAULT now() NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
conversation_type smallint NOT NULL
);
CREATE SEQUENCE ai_conversation_threads_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE ai_conversation_threads_id_seq OWNED BY ai_conversation_threads.id;
CREATE TABLE ai_feature_settings (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -18619,7 +18716,9 @@ CREATE TABLE protected_branch_merge_access_levels (
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
user_id bigint,
group_id bigint
group_id bigint,
protected_branch_project_id bigint,
protected_branch_namespace_id bigint
);
CREATE SEQUENCE protected_branch_merge_access_levels_id_seq
@ -20182,7 +20281,8 @@ CREATE TABLE status_page_published_incidents (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
issue_id bigint NOT NULL
issue_id bigint NOT NULL,
namespace_id bigint
);
CREATE SEQUENCE status_page_published_incidents_id_seq
@ -23139,6 +23239,10 @@ ALTER TABLE ONLY ai_agents ALTER COLUMN id SET DEFAULT nextval('ai_agents_id_seq
ALTER TABLE ONLY ai_code_suggestion_events ALTER COLUMN id SET DEFAULT nextval('ai_code_suggestion_events_id_seq'::regclass);
ALTER TABLE ONLY ai_conversation_messages ALTER COLUMN id SET DEFAULT nextval('ai_conversation_messages_id_seq'::regclass);
ALTER TABLE ONLY ai_conversation_threads ALTER COLUMN id SET DEFAULT nextval('ai_conversation_threads_id_seq'::regclass);
ALTER TABLE ONLY ai_feature_settings ALTER COLUMN id SET DEFAULT nextval('ai_feature_settings_id_seq'::regclass);
ALTER TABLE ONLY ai_self_hosted_models ALTER COLUMN id SET DEFAULT nextval('ai_self_hosted_models_id_seq'::regclass);
@ -24969,6 +25073,12 @@ ALTER TABLE ONLY ai_agents
ALTER TABLE ONLY ai_code_suggestion_events
ADD CONSTRAINT ai_code_suggestion_events_pkey PRIMARY KEY (id, "timestamp");
ALTER TABLE ONLY ai_conversation_messages
ADD CONSTRAINT ai_conversation_messages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ai_conversation_threads
ADD CONSTRAINT ai_conversation_threads_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ai_feature_settings
ADD CONSTRAINT ai_feature_settings_pkey PRIMARY KEY (id);
@ -28961,6 +29071,10 @@ CREATE INDEX idx_projects_id_created_at_disable_overriding_approvers_true ON pro
CREATE INDEX idx_projects_on_repository_storage_last_repository_updated_at ON projects USING btree (id, repository_storage, last_repository_updated_at);
CREATE INDEX idx_protected_branch_merge_access_levels_protected_branch_names ON protected_branch_merge_access_levels USING btree (protected_branch_namespace_id);
CREATE INDEX idx_protected_branch_merge_access_levels_protected_branch_proje ON protected_branch_merge_access_levels USING btree (protected_branch_project_id);
CREATE INDEX idx_reminder_frequency_on_work_item_progresses ON work_item_progresses USING btree (reminder_frequency);
CREATE INDEX idx_sbom_components_on_name_gin ON sbom_components USING gin (name gin_trgm_ops);
@ -29171,6 +29285,20 @@ CREATE INDEX index_ai_code_suggestion_events_on_organization_id ON ONLY ai_code_
CREATE INDEX index_ai_code_suggestion_events_on_user_id ON ONLY ai_code_suggestion_events USING btree (user_id);
CREATE INDEX index_ai_conversation_messages_on_agent_version_id ON ai_conversation_messages USING btree (agent_version_id);
CREATE INDEX index_ai_conversation_messages_on_message_xid ON ai_conversation_messages USING btree (message_xid);
CREATE INDEX index_ai_conversation_messages_on_organization_id ON ai_conversation_messages USING btree (organization_id);
CREATE INDEX index_ai_conversation_messages_on_thread_id_and_created_at ON ai_conversation_messages USING btree (thread_id, created_at);
CREATE INDEX index_ai_conversation_threads_on_last_updated_at ON ai_conversation_threads USING btree (last_updated_at);
CREATE INDEX index_ai_conversation_threads_on_organization_id ON ai_conversation_threads USING btree (organization_id);
CREATE INDEX index_ai_conversation_threads_on_user_id_and_last_updated_at ON ai_conversation_threads USING btree (user_id, last_updated_at);
CREATE INDEX index_ai_feature_settings_on_ai_self_hosted_model_id ON ai_feature_settings USING btree (ai_self_hosted_model_id);
CREATE UNIQUE INDEX index_ai_feature_settings_on_feature ON ai_feature_settings USING btree (feature);
@ -31981,10 +32109,10 @@ CREATE UNIQUE INDEX index_project_auto_devops_on_project_id ON project_auto_devo
CREATE UNIQUE INDEX index_project_build_artifacts_size_refreshes_on_project_id ON project_build_artifacts_size_refreshes USING btree (project_id);
CREATE INDEX index_project_ci_cd_settings_on_id_partial ON project_ci_cd_settings USING btree (id) WHERE (delete_pipelines_in_seconds IS NOT NULL);
CREATE UNIQUE INDEX index_project_ci_cd_settings_on_project_id ON project_ci_cd_settings USING btree (project_id);
CREATE INDEX index_project_ci_cd_settings_on_project_id_partial ON project_ci_cd_settings USING btree (project_id) WHERE (delete_pipelines_in_seconds IS NOT NULL);
CREATE UNIQUE INDEX index_project_ci_feature_usages_unique_columns ON project_ci_feature_usages USING btree (project_id, feature, default_branch);
CREATE INDEX index_project_compliance_framework_settings_on_framework_id ON project_compliance_framework_settings USING btree (framework_id);
@ -32659,6 +32787,8 @@ CREATE INDEX index_status_check_responses_on_project_id ON status_check_response
CREATE UNIQUE INDEX index_status_page_published_incidents_on_issue_id ON status_page_published_incidents USING btree (issue_id);
CREATE INDEX index_status_page_published_incidents_on_namespace_id ON status_page_published_incidents USING btree (namespace_id);
CREATE INDEX index_status_page_settings_on_project_id ON status_page_settings USING btree (project_id);
CREATE INDEX index_subscription_add_on_purchases_on_namespace_id_add_on_id ON subscription_add_on_purchases USING btree (namespace_id, subscription_add_on_id);
@ -35443,6 +35573,8 @@ ALTER INDEX p_ci_job_artifacts_expire_at_job_id_idx1 ATTACH PARTITION tmp_index_
ALTER INDEX p_ci_builds_token_encrypted_partition_id_idx ATTACH PARTITION unique_ci_builds_token_encrypted_and_partition_id;
CREATE TRIGGER ai_conversation_threads_loose_fk_trigger AFTER DELETE ON ai_conversation_threads REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER assign_ci_runner_taggings_id_trigger BEFORE INSERT ON ci_runner_taggings FOR EACH ROW EXECUTE FUNCTION assign_ci_runner_taggings_id_value();
CREATE TRIGGER assign_p_ci_build_tags_id_trigger BEFORE INSERT ON p_ci_build_tags FOR EACH ROW EXECUTE FUNCTION assign_p_ci_build_tags_id_value();
@ -35517,12 +35649,16 @@ CREATE TRIGGER trigger_02450faab875 BEFORE INSERT OR UPDATE ON vulnerability_occ
CREATE TRIGGER trigger_038fe84feff7 BEFORE INSERT OR UPDATE ON approvals FOR EACH ROW EXECUTE FUNCTION trigger_038fe84feff7();
CREATE TRIGGER trigger_05cc4448a8aa BEFORE INSERT OR UPDATE ON protected_branch_merge_access_levels FOR EACH ROW EXECUTE FUNCTION trigger_05cc4448a8aa();
CREATE TRIGGER trigger_05ce163deddf BEFORE INSERT OR UPDATE ON status_check_responses FOR EACH ROW EXECUTE FUNCTION trigger_05ce163deddf();
CREATE TRIGGER trigger_0a1b0adcf686 BEFORE INSERT OR UPDATE ON packages_debian_project_components FOR EACH ROW EXECUTE FUNCTION trigger_0a1b0adcf686();
CREATE TRIGGER trigger_0a29d4d42b62 BEFORE INSERT OR UPDATE ON approval_project_rules_protected_branches FOR EACH ROW EXECUTE FUNCTION trigger_0a29d4d42b62();
CREATE TRIGGER trigger_0aea02e5a699 BEFORE INSERT OR UPDATE ON protected_branch_merge_access_levels FOR EACH ROW EXECUTE FUNCTION trigger_0aea02e5a699();
CREATE TRIGGER trigger_0c326daf67cf BEFORE INSERT OR UPDATE ON analytics_cycle_analytics_value_stream_settings FOR EACH ROW EXECUTE FUNCTION trigger_0c326daf67cf();
CREATE TRIGGER trigger_0da002390fdc BEFORE INSERT OR UPDATE ON operations_feature_flags_issues FOR EACH ROW EXECUTE FUNCTION trigger_0da002390fdc();
@ -35755,6 +35891,8 @@ CREATE TRIGGER trigger_dadd660afe2c BEFORE INSERT OR UPDATE ON packages_debian_g
CREATE TRIGGER trigger_dbdd61a66a91 BEFORE INSERT OR UPDATE ON agent_activity_events FOR EACH ROW EXECUTE FUNCTION trigger_dbdd61a66a91();
CREATE TRIGGER trigger_dbe374a57cbb BEFORE INSERT OR UPDATE ON status_page_published_incidents FOR EACH ROW EXECUTE FUNCTION trigger_dbe374a57cbb();
CREATE TRIGGER trigger_dc13168b8025 BEFORE INSERT OR UPDATE ON vulnerability_flags FOR EACH ROW EXECUTE FUNCTION trigger_dc13168b8025();
CREATE TRIGGER trigger_delete_project_namespace_on_project_delete AFTER DELETE ON projects FOR EACH ROW WHEN ((old.project_namespace_id IS NOT NULL)) EXECUTE FUNCTION delete_associated_project_namespace();
@ -35843,6 +35981,9 @@ CREATE TRIGGER users_loose_fk_trigger AFTER DELETE ON users REFERENCING OLD TABL
CREATE TRIGGER virtual_registries_packages_maven_upstreams_loose_fk_trigger AFTER DELETE ON virtual_registries_packages_maven_upstreams REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
ALTER TABLE ONLY ai_conversation_threads
ADD CONSTRAINT fk_00234c7444 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY deployments
ADD CONSTRAINT fk_009fd21147 FOREIGN KEY (environment_id) REFERENCES environments(id) ON DELETE CASCADE;
@ -36221,6 +36362,9 @@ ALTER TABLE ONLY operations_feature_flags_issues
ALTER TABLE ONLY push_event_payloads
ADD CONSTRAINT fk_36c74129da FOREIGN KEY (event_id) REFERENCES events(id) ON DELETE CASCADE;
ALTER TABLE ONLY protected_branch_merge_access_levels
ADD CONSTRAINT fk_37ab3dd3ba FOREIGN KEY (protected_branch_project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY protected_tag_create_access_levels
ADD CONSTRAINT fk_386a642e13 FOREIGN KEY (deploy_key_id) REFERENCES keys(id) ON DELETE CASCADE;
@ -36515,6 +36659,9 @@ ALTER TABLE ONLY import_placeholder_memberships
ALTER TABLE p_ci_builds
ADD CONSTRAINT fk_6661f4f0e8 FOREIGN KEY (resource_group_id) REFERENCES ci_resource_groups(id) ON DELETE SET NULL;
ALTER TABLE ONLY ai_conversation_messages
ADD CONSTRAINT fk_68774ec148 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY remote_development_agent_configs
ADD CONSTRAINT fk_6a09894a0f FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -36941,6 +37088,9 @@ ALTER TABLE ONLY merge_requests_closing_issues
ALTER TABLE ONLY issue_assignment_events
ADD CONSTRAINT fk_a989e2acd0 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY status_page_published_incidents
ADD CONSTRAINT fk_a9fb727793 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY ssh_signatures
ADD CONSTRAINT fk_aa1efbe865 FOREIGN KEY (key_id) REFERENCES keys(id) ON DELETE SET NULL;
@ -37025,6 +37175,9 @@ ALTER TABLE ONLY status_check_responses
ALTER TABLE ONLY packages_dependency_links
ADD CONSTRAINT fk_b5c56b6ede FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY ai_conversation_messages
ADD CONSTRAINT fk_b5d715b1e4 FOREIGN KEY (agent_version_id) REFERENCES ai_agent_versions(id) ON DELETE SET NULL;
ALTER TABLE ONLY compliance_framework_security_policies
ADD CONSTRAINT fk_b5df066d8f FOREIGN KEY (framework_id) REFERENCES compliance_management_frameworks(id) ON DELETE CASCADE;
@ -37307,6 +37460,9 @@ ALTER TABLE ONLY sbom_occurrences
ALTER TABLE ONLY todos
ADD CONSTRAINT fk_d94154aa95 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY ai_conversation_threads
ADD CONSTRAINT fk_d97014a270 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY label_links
ADD CONSTRAINT fk_d97dd08678 FOREIGN KEY (label_id) REFERENCES labels(id) ON DELETE CASCADE;
@ -37508,6 +37664,9 @@ ALTER TABLE ONLY scan_result_policy_violations
ALTER TABLE ONLY analytics_devops_adoption_segments
ADD CONSTRAINT fk_f5aa768998 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY protected_branch_merge_access_levels
ADD CONSTRAINT fk_f5acff2bb8 FOREIGN KEY (protected_branch_namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY boards_epic_list_user_preferences
ADD CONSTRAINT fk_f5f2fe5c1f FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;

View File

@ -15,7 +15,13 @@ Metric definitions are available:
- Using [Grafana Explore](https://grafana.com/docs/grafana/latest/explore/) on a
Grafana instance configured against Prometheus.
## Monitor Gitaly rate limiting
<!--- start_remove The following content will be removed on remove_date: '2025-08-01' -->
## Monitor Gitaly rate limiting (deprecated)
WARNING:
This feature was [deprecated](https://gitlab.com/gitlab-org/gitaly/-/issues/5011) in GitLab 17.7
and is planned for removal in 18.0. Use [concurrency limiting](concurrency_limiting.md) instead.
Gitaly can be configured to limit requests based on:
@ -29,6 +35,8 @@ of requests dropped due to request limiting. The `reason` label indicates why a
- `max_size`, because the concurrency queue size was reached.
- `max_time`, because the request exceeded the maximum queue wait time as configured in Gitaly.
<!--- end_remove -->
## Monitor Gitaly concurrency limiting
You can observe specific behavior of [concurrency-queued requests](concurrency_limiting.md#limit-rpc-concurrency) using Gitaly logs and Prometheus.

View File

@ -18063,6 +18063,7 @@ A user with add-on data.
| <a id="addonuserstate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="addonuserstatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="addonusertwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="addonusertype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="addonuseruserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="addonuseruserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="addonuserusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |
@ -19019,6 +19020,7 @@ Core representation of a GitLab user.
| <a id="autocompleteduserstate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="autocompleteduserstatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="autocompletedusertwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="autocompletedusertype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="autocompleteduseruserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="autocompleteduseruserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="autocompleteduserusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |
@ -21571,6 +21573,7 @@ The currently authenticated GitLab user.
| <a id="currentuserstate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="currentuserstatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="currentusertwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="currentusertype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="currentuseruserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="currentuseruserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="currentuserusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |
@ -27530,6 +27533,7 @@ A user assigned to a merge request.
| <a id="mergerequestassigneestate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="mergerequestassigneestatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="mergerequestassigneetwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="mergerequestassigneetype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="mergerequestassigneeuserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="mergerequestassigneeuserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="mergerequestassigneeusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |
@ -27935,6 +27939,7 @@ The author of the merge request.
| <a id="mergerequestauthorstate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="mergerequestauthorstatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="mergerequestauthortwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="mergerequestauthortype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="mergerequestauthoruserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="mergerequestauthoruserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="mergerequestauthorusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |
@ -28386,6 +28391,7 @@ A user participating in a merge request.
| <a id="mergerequestparticipantstate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="mergerequestparticipantstatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="mergerequestparticipanttwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="mergerequestparticipanttype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="mergerequestparticipantuserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="mergerequestparticipantuserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="mergerequestparticipantusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |
@ -28810,6 +28816,7 @@ A user assigned to a merge request as a reviewer.
| <a id="mergerequestreviewerstate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="mergerequestreviewerstatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="mergerequestreviewertwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="mergerequestreviewertype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="mergerequestrevieweruserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="mergerequestrevieweruserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="mergerequestreviewerusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |
@ -35600,6 +35607,7 @@ Core representation of a GitLab user.
| <a id="usercorestate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="usercorestatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="usercoretwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="usercoretype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="usercoreuserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="usercoreuserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="usercoreusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |
@ -37741,6 +37749,7 @@ LLMs supported by the self-hosted model features.
| <a id="aiacceptedselfhostedmodelsgpt"></a>`GPT` | GPT: Suitable for code suggestions. |
| <a id="aiacceptedselfhostedmodelsllama3"></a>`LLAMA3` | LLaMA 3: Suitable for code suggestions and duo chat. |
| <a id="aiacceptedselfhostedmodelsmistral"></a>`MISTRAL` | Mistral: Suitable for code suggestions and duo chat. |
| <a id="aiacceptedselfhostedmodelsmixtral"></a>`MIXTRAL` | Mixtral: Suitable for code suggestions and duo chat. |
### `AiAction`
@ -40835,6 +40844,31 @@ Possible states of a user.
| <a id="userstatedeactivated"></a>`deactivated` | User is no longer active and cannot use the system. |
| <a id="userstateldap_blocked"></a>`ldap_blocked` | User has been blocked by the system. |
### `UserType`
Possible types of user.
| Value | Description |
| ----- | ----------- |
| <a id="usertypeadmin_bot"></a>`ADMIN_BOT` | Admin bot. |
| <a id="usertypealert_bot"></a>`ALERT_BOT` | Alert bot. |
| <a id="usertypeautomation_bot"></a>`AUTOMATION_BOT` | Automation bot. |
| <a id="usertypeduo_code_review_bot"></a>`DUO_CODE_REVIEW_BOT` | Duo code review bot. |
| <a id="usertypeghost"></a>`GHOST` | Ghost. |
| <a id="usertypehuman"></a>`HUMAN` | Human. |
| <a id="usertypeimport_user"></a>`IMPORT_USER` | Import user. |
| <a id="usertypellm_bot"></a>`LLM_BOT` | Llm bot. |
| <a id="usertypemigration_bot"></a>`MIGRATION_BOT` | Migration bot. |
| <a id="usertypeplaceholder"></a>`PLACEHOLDER` | Placeholder. |
| <a id="usertypeproject_bot"></a>`PROJECT_BOT` | Project bot. |
| <a id="usertypesecurity_bot"></a>`SECURITY_BOT` | Security bot. |
| <a id="usertypesecurity_policy_bot"></a>`SECURITY_POLICY_BOT` | Security policy bot. |
| <a id="usertypeservice_account"></a>`SERVICE_ACCOUNT` | Service account. |
| <a id="usertypeservice_user"></a>`SERVICE_USER` | Service user. |
| <a id="usertypesuggested_reviewers_bot"></a>`SUGGESTED_REVIEWERS_BOT` | Suggested reviewers bot. |
| <a id="usertypesupport_bot"></a>`SUPPORT_BOT` | Support bot. |
| <a id="usertypevisual_review_bot"></a>`VISUAL_REVIEW_BOT` | Visual review bot. |
### `ValueStreamDashboardMetric`
Possible identifier types for a measurement.
@ -43127,6 +43161,7 @@ Implementations:
| <a id="userstate"></a>`state` | [`UserState!`](#userstate) | State of the user. |
| <a id="userstatus"></a>`status` | [`UserStatus`](#userstatus) | User status. |
| <a id="usertwitter"></a>`twitter` | [`String`](#string) | X (formerly Twitter) username of the user. |
| <a id="usertype"></a>`type` | [`UserType!`](#usertype) | Type of the user. |
| <a id="useruserpermissions"></a>`userPermissions` | [`UserPermissions!`](#userpermissions) | Permissions for the current user on the resource. |
| <a id="useruserpreferences"></a>`userPreferences` | [`UserPreferences`](#userpreferences) | Preferences for the user. |
| <a id="userusername"></a>`username` | [`String!`](#string) | Username of the user. Unique within this instance of GitLab. |

View File

@ -171,7 +171,11 @@ Maintainers can:
- Unprotect a protected environment by selecting the **Unprotect** button for that environment.
After an environment is unprotected, all access entries are deleted and must
be re-entered if the environment is re-protected.
be re-entered if the environment is re-protected.
After an approval rule is deleted, previously approved deployments do not show who approved the deployment.
Information on who approved a deployment is still available in the [project audit events](../../user/compliance/audit_events.md#project-audit-events).
If a new rule is added, previous deployments show the new rules without the option to approve the deployment. [Issue 506687](https://gitlab.com/gitlab-org/gitlab/-/issues/506687) proposes to show the full approval history of deployments, even if an approval rule is deleted.
For more information, see [Deployment safety](deployment_safety.md).

View File

@ -562,6 +562,35 @@ You can read more about it in the [charts release page](https://docs.gitlab.com/
<div class="deprecation " data-milestone="18.0">
### Gitaly rate limiting
<div class="deprecation-notes">
- Announced in GitLab <span class="milestone">17.7</span>
- Removal in GitLab <span class="milestone">18.0</span>
- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/gitaly/-/issues/5011).
</div>
Because of the highly variable nature of Git operations and repository latencies, Gitaly
[RPC-based rate limiting](https://docs.gitlab.com/ee/administration/gitaly/monitoring.html#monitor-gitaly-rate-limiting)
is ineffective. Configuring proper rate limits is challenging, and the configured limits often become obsolete quickly because harmful
actions rarely generate enough requests per second to stand out.
Gitaly already supports [concurrency limiting](https://docs.gitlab.com/ee/administration/gitaly/concurrency_limiting.html) and an
[adaptive limiting add-on](https://docs.gitlab.com/ee/administration/gitaly/concurrency_limiting.html#adaptive-concurrency-limiting),
which have proven to work well in production.
Because Gitaly is not directly exposed to external networks and external protection layers, such as load balancers,
provide better safeguards, rate limiting is less effective.
Therefore, we're deprecating rate limiting in favor of the more reliable concurrency limiting. Gitaly RPC-based
rate limiting will be removed in GitLab 18.0.
</div>
<div class="deprecation " data-milestone="18.0">
### Group vulnerability report by OWASP top 10 2017 is deprecated
<div class="deprecation-notes">

View File

@ -140,6 +140,7 @@ This rule enforces the defined actions whenever the pipeline runs for a selected
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/152855) a new application setting `security_policy_scheduled_scans_max_concurrency` in GitLab 17.1. The concurrency limit applies when both the `scan_execution_pipeline_worker` and `scan_execution_pipeline_concurrency_control` are enabled.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/158636) a concurrency limit for scan execution scheduled jobs in GitLab 17.3 [with a flag](../../../administration/feature_flags.md) named `scan_execution_pipeline_concurrency_control`.
> - [Enabled](https://gitlab.com/gitlab-org/gitlab/-/issues/451890) the `scan_execution_pipeline_worker` feature flag on GitLab.com in GitLab 17.5.
> - [Feature flag](https://gitlab.com/gitlab-org/gitlab/-/issues/451890) `scan_execution_pipeline_worker` removed in GitLab 17.6.
> - [Enabled](https://gitlab.com/gitlab-org/gitlab/-/issues/463802) the `scan_execution_pipeline_concurrency_control` feature flag on GitLab.com in GitLab 17.6.
WARNING:

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillProtectedBranchMergeAccessLevelsProtectedBranchNamespaceId < BackfillDesiredShardingKeyJob
operation_name :backfill_protected_branch_merge_access_levels_protected_branch_namespace_id
feature_category :source_code_management
end
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillProtectedBranchMergeAccessLevelsProtectedBranchProjectId < BackfillDesiredShardingKeyJob
operation_name :backfill_protected_branch_merge_access_levels_protected_branch_project_id
feature_category :source_code_management
end
end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillStatusPagePublishedIncidentsNamespaceId < BackfillDesiredShardingKeyJob
operation_name :backfill_status_page_published_incidents_namespace_id
feature_category :incident_management
end
end
end

View File

@ -95,7 +95,7 @@ module Gitlab
tags.each do |tag|
accumulator[:taggings] << tagging_attributes(tag, taggable) if polymorphic_taggings_available?
if monomorphic_taggings_available?
if monomorphic_taggings_available?(taggable)
accumulator[:monomorphic_taggings] << monomorphic_taggings_record(tag, taggable)
end
end
@ -129,8 +129,8 @@ module Gitlab
end
end
def monomorphic_taggings_available?
config.monomorphic_taggings?
def monomorphic_taggings_available?(taggable)
config.monomorphic_taggings?(taggable)
end
def polymorphic_taggings_available?

View File

@ -37,7 +37,7 @@ module Gitlab
true
end
def monomorphic_taggings?
def monomorphic_taggings?(_taggable)
true
end
end

View File

@ -23,7 +23,8 @@ module Gitlab
def strategies
[
BuildsTagsConfiguration
BuildsTagsConfiguration,
RunnerTaggingsConfiguration
]
end
end

View File

@ -15,7 +15,7 @@ module Gitlab
true
end
def monomorphic_taggings?
def monomorphic_taggings?(_taggable)
false
end
end

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true
module Gitlab
module Ci
module Tags
class BulkInsert
class RunnerTaggingsConfiguration
include ::Gitlab::Utils::StrongMemoize
def self.applies_to?(record)
record.is_a?(::Ci::Runner)
end
def self.build_from(runner)
new(runner)
end
def initialize(runner)
@runner = runner
end
def join_model
::Ci::RunnerTagging
end
def unique_by
[:tag_id, :runner_id, :runner_type]
end
def attributes_map(runner)
{
runner_id: runner.id,
runner_type: runner.runner_type,
sharding_key_id: runner.sharding_key_id
}
end
def polymorphic_taggings?
true
end
def monomorphic_taggings?(runner)
strong_memoize_with(:monomorphic_taggings, runner.owner) do
::Feature.enabled?(:write_to_ci_runner_taggings, runner.owner)
end
end
end
end
end
end
end

View File

@ -25285,6 +25285,12 @@ msgstr ""
msgid "GlobalSearch|Archived"
msgstr ""
msgid "GlobalSearch|Author"
msgstr ""
msgid "GlobalSearch|Author not included"
msgstr ""
msgid "GlobalSearch|Branch not included"
msgstr ""
@ -48297,7 +48303,7 @@ msgstr ""
msgid "ScanExecutionPolicy|Add new condition"
msgstr ""
msgid "ScanExecutionPolicy|Are you sure you want to create merge request fot this policy?"
msgid "ScanExecutionPolicy|Are you sure you want to create merge request for this policy?"
msgstr ""
msgid "ScanExecutionPolicy|Back to edit policy"
@ -59074,8 +59080,10 @@ msgstr ""
msgid "Unlimited"
msgstr ""
msgid "UnlimitedMembersDuringTrialAlert|During your trial, invite as many members as you like to %{group_or_project} to collaborate with you."
msgstr ""
msgid "UnlimitedMembersDuringTrialAlert|During your trial, invite as many members as you like to %{name} to collaborate with you. When your trial ends, you'll have a maximum of %{limit} member on the Free tier, or you can get more by upgrading to a paid tier."
msgid_plural "UnlimitedMembersDuringTrialAlert|During your trial, invite as many members as you like to %{name} to collaborate with you. When your trial ends, you'll have a maximum of %{limit} members on the Free tier, or you can get more by upgrading to a paid tier."
msgstr[0] ""
msgstr[1] ""
msgid "UnlimitedMembersDuringTrialAlert|Explore paid plans"
msgstr ""
@ -62310,11 +62318,6 @@ msgstr ""
msgid "When you transfer your project to a group, you can easily manage multiple projects, view usage quotas for storage, compute minutes, and users, and start a trial or upgrade to a paid tier."
msgstr ""
msgid "When your trial ends, you'll have a maximum of %d member on the Free tier, or you can get more by upgrading to a paid tier."
msgid_plural "When your trial ends, you'll have a maximum of %d members on the Free tier, or you can get more by upgrading to a paid tier."
msgstr[0] ""
msgstr[1] ""
msgid "When your trial ends, you'll move to the Free tier, which has a limit of %{free_user_limit} seat. %{free_user_limit} seat will remain active, and members not occupying a seat will have the %{link_start}Over limit status%{link_end} and lose access to this group."
msgid_plural "When your trial ends, you'll move to the Free tier, which has a limit of %{free_user_limit} seats. %{free_user_limit} seats will remain active, and members not occupying a seat will have the %{link_start}Over limit status%{link_end} and lose access to this group."
msgstr[0] ""

View File

@ -412,6 +412,7 @@ RSpec.describe 'Database schema',
# These pre-existing columns does not use a schema validation yet
let(:ignored_jsonb_columns_map) do
{
"Ai::Conversation::Message" => %w[extras error_details],
"ApplicationSetting" => %w[repository_storages_weighted],
"AlertManagement::Alert" => %w[payload],
"Ci::BuildMetadata" => %w[config_options config_variables],

View File

@ -1739,15 +1739,60 @@ export const mockDataForBlobBody = {
export const mockSourceBranches = [
{
text: 'master',
value: 'master',
text: 'Master Item',
value: 'master-item',
},
{
text: 'feature',
value: 'feature',
text: 'Feature Item',
value: 'feature-item',
},
{
text: 'develop',
value: 'develop',
text: 'Develop Item',
value: 'develop-item',
},
];
export const mockAuthorsAxiosResponse = [
{
id: 1,
username: 'root',
name: 'Administrator',
state: 'active',
locked: false,
avatar_url:
'https://www.gravatar.com/avatar/8a2ba320206c6d79e89dd41a9081b7ae521d365f2054b3db1ac6462f692b176f?s=80&d=identicon',
web_url: 'http://127.0.0.1:3000/root',
status_tooltip_html: null,
show_status: false,
availability: null,
path: '/root',
},
{
id: 65,
username: 'john',
name: 'John Doe',
state: 'active',
locked: false,
avatar_url:
'https://www.gravatar.com/avatar/d9165b0da62fb9f9a57214a8fcc333101f2d10f494c662b53ffbeded3dcfa0dd?s=80&d=identicon',
web_url: 'http://127.0.0.1:3000/john',
status_tooltip_html: null,
show_status: false,
availability: null,
path: '/john',
},
{
id: 50,
username: 'jane',
name: 'Jane Doe',
state: 'active',
locked: false,
avatar_url:
'https://www.gravatar.com/avatar/224e81a612a566f3eb211d1d457b2335b662ad0dc7bb8d1b642056dd1b81755c?s=80&d=identicon',
web_url: 'http://127.0.0.1:3000/jane',
status_tooltip_html: null,
show_status: false,
availability: null,
path: '/jane',
},
];

View File

@ -0,0 +1,173 @@
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import { GlFormCheckbox } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import AjaxCache from '~/lib/utils/ajax_cache';
import AuthorFilter from '~/search/sidebar/components/author_filter/index.vue';
import FilterDropdown from '~/search/sidebar/components/shared/filter_dropdown.vue';
import { MOCK_QUERY, mockAuthorsAxiosResponse } from '../../mock_data';
Vue.use(Vuex);
describe('Author filter', () => {
let wrapper;
const mock = new MockAdapter(axios);
const actions = {
setQuery: jest.fn(),
applyQuery: jest.fn(),
};
const defaultState = {
query: {
scope: 'merge_requests',
group_id: 1,
search: '*',
},
};
const createComponent = (state) => {
const store = new Vuex.Store({
...defaultState,
state,
actions,
});
wrapper = shallowMount(AuthorFilter, {
store,
});
};
const findFilterDropdown = () => wrapper.findComponent(FilterDropdown);
const findGlFormCheckbox = () => wrapper.findComponent(GlFormCheckbox);
beforeEach(() => {
createComponent();
});
describe('when initial state', () => {
it('renders the component', () => {
expect(findFilterDropdown().exists()).toBe(true);
expect(findGlFormCheckbox().exists()).toBe(true);
});
});
describe.each(['not[author_username]', 'author_username'])(
`when author is selected for %s author search`,
(authorParam) => {
beforeEach(async () => {
mock
.onGet('/-/autocomplete/users.json?current_user=true&active=true&search=')
.reply(200, mockAuthorsAxiosResponse);
createComponent({
query: {
...MOCK_QUERY,
[authorParam]: 'root',
},
});
findFilterDropdown().vm.$emit('selected', 'root');
await nextTick();
});
it('renders the component with selected options', () => {
expect(findFilterDropdown().props('selectedItem')).toBe('root');
expect(findGlFormCheckbox().attributes('checked')).toBe(
authorParam === 'not[author_username]' ? 'true' : undefined,
);
});
it('displays the correct placeholder text and icon', () => {
expect(findFilterDropdown().props('searchText')).toBe('Administrator');
expect(findFilterDropdown().props('icon')).toBe('user');
});
},
);
describe('when opening dropdown', () => {
beforeEach(() => {
jest.spyOn(axios, 'get');
jest.spyOn(AjaxCache, 'retrieve');
createComponent({
groupInitialJson: {
id: 1,
full_name: 'gitlab-org/gitlab-test',
full_path: 'gitlab-org/gitlab-test',
},
});
});
afterEach(() => {
mock.restore();
});
it('calls AjaxCache with correct params', () => {
findFilterDropdown().vm.$emit('shown');
expect(AjaxCache.retrieve).toHaveBeenCalledWith(
'/-/autocomplete/users.json?current_user=true&active=true&group_id=1&search=',
);
});
});
describe.each([false, true])('when selecting an author with %s', (toggle) => {
beforeEach(() => {
createComponent({
query: {
...MOCK_QUERY,
},
});
});
it('calls setQuery with the correct params', () => {
const authorParam = 'author_username';
const authorNotParam = 'not[author_username]';
wrapper.vm.toggleState = !toggle;
findFilterDropdown().vm.$emit('selected', 'root');
expect(actions.setQuery).toHaveBeenCalledTimes(2);
expect(actions.setQuery.mock.calls).toMatchObject([
[
expect.anything(),
{
key: toggle ? authorParam : authorNotParam,
value: 'root',
},
],
[
expect.anything(),
{
key: toggle ? authorNotParam : authorParam,
value: '',
},
],
]);
});
});
describe('when resetting selected author', () => {
beforeEach(() => {
createComponent();
});
it(`calls setQuery with correct param`, () => {
findFilterDropdown().vm.$emit('reset');
expect(actions.setQuery).toHaveBeenCalledWith(expect.anything(), {
key: 'author_username',
value: '',
});
expect(actions.setQuery).toHaveBeenCalledWith(expect.anything(), {
key: 'not[author_username]',
value: '',
});
expect(actions.applyQuery).toHaveBeenCalled();
});
});
});

View File

@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
import { GlCollapsibleListbox, GlListboxItem, GlIcon } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import FilterDropdown from '~/search/sidebar/components/shared/filter_dropdown.vue';
import waitForPromises from 'helpers/wait_for_promises';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
@ -11,7 +11,7 @@ describe('BranchDropdown', () => {
const defaultProps = {
listData: mockSourceBranches,
errors: [],
error: '',
selectedItem: 'Master Item',
headerText: 'Filter header',
searchText: 'Search filter items',
@ -20,8 +20,8 @@ describe('BranchDropdown', () => {
isLoading: false,
};
const createComponent = (props = {}, options = {}) => {
wrapper = shallowMount(FilterDropdown, {
const createComponent = (props = {}) => {
wrapper = shallowMountExtended(FilterDropdown, {
propsData: {
...defaultProps,
...props,
@ -30,13 +30,12 @@ describe('BranchDropdown', () => {
GlCollapsibleListbox,
GlIcon,
},
...options,
});
};
const findGlCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const findGlListboxItems = () => wrapper.findAllComponents(GlListboxItem);
const findErrorMessages = () => wrapper.findAll('[data-testid="branch-dropdown-error-list"]');
const findErrorMessage = () => wrapper.findByTestId('branch-dropdown-error');
describe('when nothing is selected', () => {
beforeEach(() => {
@ -71,18 +70,24 @@ describe('BranchDropdown', () => {
expect(props.resetButtonLabel).toBe('Reset');
});
it('renders error messages when errors prop is passed', async () => {
const errors = ['Error 1', 'Error 2'];
createComponent({ errors });
it('renders error message when error prop is passed', async () => {
createComponent({ error: 'Error 1' });
await waitForPromises();
expect(findErrorMessage().exists()).toBe(true);
expect(findErrorMessage().text()).toBe('Error 1');
});
const errorMessages = findErrorMessages();
it('renders error message reactivly', async () => {
createComponent();
expect(errorMessages.length).toBe(errors.length);
errorMessages.wrappers.forEach((errorWrapper, index) => {
expect(errorWrapper.text()).toContain(errors[index]);
});
await waitForPromises();
expect(findErrorMessage().exists()).toBe(false);
wrapper.setProps({ error: 'Error 1' });
await waitForPromises();
expect(findErrorMessage().exists()).toBe(true);
expect(findErrorMessage().text()).toBe('Error 1');
});
it('search filters items', async () => {

View File

@ -8,6 +8,7 @@ import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
import SourceBranchFilter from '~/search/sidebar/components/source_branch_filter/index.vue';
import LabelFilter from '~/search/sidebar/components/label_filter/index.vue';
import AuthorFilter from '~/search/sidebar/components/author_filter/index.vue';
Vue.use(Vuex);
@ -21,7 +22,12 @@ describe('GlobalSearch MergeRequestsFilters', () => {
const createComponent = (
initialState = {},
provide = { glFeatures: { searchMrFilterSourceBranch: true } },
provide = {
glFeatures: {
searchMrFilterSourceBranch: true,
searchMrFilterAuthor: true,
},
},
) => {
const store = new Vuex.Store({
state: {
@ -45,8 +51,9 @@ describe('GlobalSearch MergeRequestsFilters', () => {
const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
const findSourceBranchFilter = () => wrapper.findComponent(SourceBranchFilter);
const findLabelFilter = () => wrapper.findComponent(LabelFilter);
const findAuthorFilter = () => wrapper.findComponent(AuthorFilter);
describe('Renders correctly with Archived Filter', () => {
describe('When renders correctly with Archived Filter', () => {
beforeEach(() => {
createComponent();
});
@ -59,16 +66,20 @@ describe('GlobalSearch MergeRequestsFilters', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
it('renders sourceBranchFilter', () => {
it('renders SourceBranchFilter', () => {
expect(findSourceBranchFilter().exists()).toBe(true);
});
it('renders label filter', () => {
it('renders LabelFilter', () => {
expect(findLabelFilter().exists()).toBe(true);
});
it('renders AuthorFilter', () => {
expect(findAuthorFilter().exists()).toBe(true);
});
});
describe('Renders correctly with basic search', () => {
describe('When renders correctly with basic search', () => {
beforeEach(() => {
createComponent({ searchType: 'basic' });
});
@ -81,35 +92,46 @@ describe('GlobalSearch MergeRequestsFilters', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
it('renders sourceBranchFilter', () => {
it('renders SourceBranchFilter', () => {
expect(findSourceBranchFilter().exists()).toBe(true);
});
it('will not render label filter', () => {
it('will not render LabelFilter', () => {
expect(findLabelFilter().exists()).toBe(false);
});
it('will not render AuthorFilter', () => {
expect(findAuthorFilter().exists()).toBe(false);
});
});
describe.each([true, false])(
'When feature flag search_mr_filter_source_branch is',
(searchMrFilterSourceBranch) => {
beforeEach(() => {
createComponent(null, { glFeatures: { searchMrFilterSourceBranch } });
});
describe('When feature flag search_mr_filter_source_branch is disabled', () => {
beforeEach(() => {
createComponent(null, { glFeatures: { searchMrFilterSourceBranch: false } });
});
it(`${searchMrFilterSourceBranch ? 'will' : 'will not'} render sourceBranchFilter`, () => {
expect(findSourceBranchFilter().exists()).toBe(searchMrFilterSourceBranch);
});
},
);
it(`will not render SourceBranchFilter`, () => {
expect(findSourceBranchFilter().exists()).toBe(false);
});
});
describe('hasMissingProjectContext getter', () => {
describe('When feature flag search_mr_filter_author is disabled', () => {
beforeEach(() => {
createComponent(null, { glFeatures: { searchMrFilterAuthor: false } });
});
it(`will not render AuthorFilter`, () => {
expect(findAuthorFilter().exists()).toBe(false);
});
});
describe('#hasMissingProjectContext getter', () => {
beforeEach(() => {
defaultGetters.hasMissingProjectContext = () => false;
createComponent();
});
it('hides archived filter', () => {
it('hides ArchivedFilter', () => {
expect(findArchivedFilter().exists()).toBe(false);
});
});

View File

@ -62,6 +62,7 @@ RSpec.describe GitlabSchema.types['User'], feature_category: :user_profile do
pronouns
ide
userPreferences
type
]
expect(described_class).to include_graphql_fields(*expected_fields)
@ -397,4 +398,56 @@ RSpec.describe GitlabSchema.types['User'], feature_category: :user_profile do
is_expected.to have_graphql_type(Types::UserPreferencesType)
end
end
describe 'type field' do
subject { described_class.fields['type'] }
let_it_be(:admin) { create(:user, :admin) }
let_it_be(:regular_user) { create(:user) }
let_it_be(:placeholder_user) { create(:user, :placeholder) }
let_it_be(:import_user) { create(:user, :import_user) }
let_it_be(:ghost_user) { create(:user, :ghost) }
let(:query) do
<<~GQL
query($id: UserID!) {
user(id: $id) {
type
}
}
GQL
end
it 'returns type field' do
is_expected.to have_graphql_type(Types::Users::TypeEnum.to_non_null_type)
end
it 'returns HUMAN for regular users' do
result = GitlabSchema.execute(query, variables: { id: regular_user.to_global_id.to_s },
context: { current_user: admin }).as_json
expect(result.dig('data', 'user', 'type')).to eq('HUMAN')
end
it 'returns PLACEHOLDER for placeholder users' do
result = GitlabSchema.execute(query, variables: { id: placeholder_user.to_global_id.to_s },
context: { current_user: admin }).as_json
expect(result.dig('data', 'user', 'type')).to eq('PLACEHOLDER')
end
it 'returns IMPORT_USER for import users' do
result = GitlabSchema.execute(query, variables: { id: import_user.to_global_id.to_s },
context: { current_user: admin }).as_json
expect(result.dig('data', 'user', 'type')).to eq('IMPORT_USER')
end
it 'returns GHOST for ghost users' do
result = GitlabSchema.execute(query, variables: { id: ghost_user.to_global_id.to_s },
context: { current_user: admin }).as_json
expect(result.dig('data', 'user', 'type')).to eq('GHOST')
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillProtectedBranchMergeAccessLevelsProtectedBranchNamespaceId,
feature_category: :source_code_management,
schema: 20241204130226 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :protected_branch_merge_access_levels }
let(:backfill_column) { :protected_branch_namespace_id }
let(:backfill_via_table) { :protected_branches }
let(:backfill_via_column) { :namespace_id }
let(:backfill_via_foreign_key) { :protected_branch_id }
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillProtectedBranchMergeAccessLevelsProtectedBranchProjectId,
feature_category: :source_code_management,
schema: 20241204130221 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :protected_branch_merge_access_levels }
let(:backfill_column) { :protected_branch_project_id }
let(:backfill_via_table) { :protected_branches }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :protected_branch_id }
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillStatusPagePublishedIncidentsNamespaceId,
feature_category: :incident_management,
schema: 20241205143056 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :status_page_published_incidents }
let(:backfill_column) { :namespace_id }
let(:backfill_via_table) { :issues }
let(:backfill_via_column) { :namespace_id }
let(:backfill_via_foreign_key) { :issue_id }
end
end

View File

@ -2,208 +2,223 @@
# NOTE(review): this span is a *mangled diff*, not a valid Ruby file. The old
# (job/Ci::Build-specific) and new (RSpec::Parameterized taggable/runner)
# versions of spec/lib/gitlab/ci/tags/bulk_insert_spec.rb have been interleaved
# line-by-line with the +/- diff markers stripped (note the duplicated
# `RSpec.describe` on the next two lines and duplicated `let(:statuses)`
# definitions). Do NOT attempt to run or lint this as-is; recover the intended
# file from the original commit rather than from this rendering. Left
# byte-identical below because no reconstruction can be verified from this
# view alone.
require 'spec_helper'
RSpec.describe Gitlab::Ci::Tags::BulkInsert do
RSpec.describe Gitlab::Ci::Tags::BulkInsert, feature_category: :continuous_integration do
using RSpec::Parameterized::TableSyntax
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be_with_refind(:job) { create(:ci_build, :unique_name, pipeline: pipeline) }
let_it_be_with_refind(:other_job) { create(:ci_build, :unique_name, pipeline: pipeline) }
let(:statuses) { [job, other_job] }
let_it_be_with_refind(:runner) { create(:ci_runner) }
let_it_be_with_refind(:other_runner) { create(:ci_runner, :project_type, projects: [project]) }
let(:statuses) { [taggable, other_taggable] }
let(:config) { described_class::NoConfig.new }
subject(:service) { described_class.new(statuses, config: config) }
describe '.bulk_insert_tags!' do
let(:inserter) { instance_double(described_class) }
it 'delegates to bulk insert class' do
expect(described_class)
.to receive(:new)
.with(statuses, config: nil)
.and_return(inserter)
expect(inserter).to receive(:insert!)
described_class.bulk_insert_tags!(statuses)
end
where(:taggable_class, :taggable, :other_taggable, :tagging_class, :taggable_id_column, :partition_column,
:expected_configuration) do
Ci::Build | ref(:job) | ref(:other_job) | Ci::BuildTag | :build_id | :partition_id |
described_class::BuildsTagsConfiguration
Ci::Runner | ref(:runner) | ref(:other_runner) | Ci::RunnerTagging | :runner_id | :runner_type |
described_class::RunnerTaggingsConfiguration
end
describe '#insert!' do
context 'without tags' do
it { expect(service.insert!).to be_truthy }
with_them do
describe '.bulk_insert_tags!' do
let(:inserter) { instance_double(described_class) }
it 'delegates to bulk insert class' do
expect(described_class)
.to receive(:new)
.with(statuses, config: nil)
.and_return(inserter)
expect(inserter).to receive(:insert!)
described_class.bulk_insert_tags!(statuses)
end
end
context 'with tags' do
before do
job.tag_list = %w[tag1 tag2]
other_job.tag_list = %w[tag2 tag3 tag4]
describe '#insert!' do
context 'without tags' do
it { expect(service.insert!).to be_truthy }
end
it 'persists tags' do
expect(service.insert!).to be_truthy
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
expect(other_job.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
end
it 'persists taggings' do
service.insert!
expect(job.taggings.size).to eq(2)
expect(other_job.taggings.size).to eq(3)
expect(Ci::Build.tagged_with('tag1')).to include(job)
expect(Ci::Build.tagged_with('tag2')).to include(job, other_job)
expect(Ci::Build.tagged_with('tag3')).to include(other_job)
end
it 'strips tags' do
job.tag_list = [' taga', 'tagb ', ' tagc ']
service.insert!
expect(job.tags.map(&:name)).to match_array(%w[taga tagb tagc])
end
context 'when batching inserts for tags' do
context 'with tags' do
before do
stub_const("#{described_class}::TAGS_BATCH_SIZE", 2)
taggable.tag_list = %w[tag1 tag2]
other_taggable.tag_list = %w[tag2 tag3 tag4]
end
it 'inserts tags in batches' do
recorder = ActiveRecord::QueryRecorder.new { service.insert! }
count = recorder.log.count { |query| query.include?('INSERT INTO "tags"') }
it 'persists tags' do
expect(service.insert!).to be_truthy
expect(count).to eq(2)
end
end
context 'when batching inserts for taggings' do
before do
stub_const("#{described_class}::TAGGINGS_BATCH_SIZE", 2)
expect(taggable.reload.tag_list).to match_array(%w[tag1 tag2])
expect(other_taggable.reload.tag_list).to match_array(%w[tag2 tag3 tag4])
end
it 'inserts taggings in batches' do
recorder = ActiveRecord::QueryRecorder.new { service.insert! }
count = recorder.log.count { |query| query.include?('INSERT INTO "taggings"') }
expect(count).to eq(3)
end
end
context 'with no config provided' do
it 'does not persist tag links' do
it 'persists taggings' do
service.insert!
expect(job.tag_links).to be_empty
expect(other_job.tag_links).to be_empty
end
end
expect(taggable.taggings.size).to eq(2)
expect(other_taggable.taggings.size).to eq(3)
context 'with config provided by the factory' do
let(:config) { nil }
it 'generates a valid config' do
expect(service.config).to be_a(described_class::BuildsTagsConfiguration)
expect(taggable_class.tagged_with('tag1')).to include(taggable)
expect(taggable_class.tagged_with('tag2')).to include(taggable, other_taggable)
expect(taggable_class.tagged_with('tag3')).to include(other_taggable)
end
context 'with flags' do
it 'strips tags' do
taggable.tag_list = [' taga', 'tagb ', ' tagc ']
service.insert!
expect(taggable.tags.map(&:name)).to match_array(%w[taga tagb tagc])
end
context 'when batching inserts for tags' do
before do
allow(service.config).to receive(:monomorphic_taggings?) { monomorphic_taggings }
allow(service.config).to receive(:polymorphic_taggings?) { polymorphic_taggings }
stub_const("#{described_class}::TAGS_BATCH_SIZE", 2)
end
context 'when writing to both tables' do
let(:monomorphic_taggings) { true }
let(:polymorphic_taggings) { true }
it 'inserts tags in batches' do
recorder = ActiveRecord::QueryRecorder.new { service.insert! }
count = recorder.log.count { |query| query.include?('INSERT INTO "tags"') }
it 'persists tag links and taggings' do
service.insert!
expect(count).to eq(2)
end
end
expect(job.tag_links).not_to be_empty
expect(other_job.tag_links).not_to be_empty
context 'when batching inserts for taggings' do
before do
stub_const("#{described_class}::TAGGINGS_BATCH_SIZE", 2)
end
expect(jobs_tagged_with('tag1')).to contain_exactly(job)
expect(jobs_tagged_with('tag2')).to contain_exactly(job, other_job)
expect(jobs_tagged_with('tag3')).to contain_exactly(other_job)
it 'inserts taggings in batches' do
recorder = ActiveRecord::QueryRecorder.new { service.insert! }
count = recorder.log.count { |query| query.include?('INSERT INTO "taggings"') }
expect(job.taggings).not_to be_empty
expect(other_job.taggings).not_to be_empty
expect(count).to eq(3)
end
end
expect(Ci::Build.tagged_with('tag1')).to contain_exactly(job)
expect(Ci::Build.tagged_with('tag2')).to contain_exactly(job, other_job)
expect(Ci::Build.tagged_with('tag3')).to contain_exactly(other_job)
context 'with no config provided' do
it 'does not persist tag links' do
service.insert!
expect(taggable.tag_links).to be_empty
expect(other_taggable.tag_links).to be_empty
end
end
context 'with config provided by the factory' do
let(:config) { nil }
it 'generates a valid config' do
expect(service.config).to be_a(expected_configuration)
end
context 'with flags' do
before do
allow(service.config).to receive(:monomorphic_taggings?) { monomorphic_taggings }
allow(service.config).to receive(:polymorphic_taggings?) { polymorphic_taggings }
end
end
context 'when writing only to taggings' do
let(:monomorphic_taggings) { false }
let(:polymorphic_taggings) { true }
context 'when writing to both tables' do
let(:monomorphic_taggings) { true }
let(:polymorphic_taggings) { true }
it 'persists taggings' do
service.insert!
it 'persists tag links and taggings' do
service.insert!
expect(job.tag_links).to be_empty
expect(other_job.tag_links).to be_empty
expect(taggable.tag_links).not_to be_empty
expect(other_taggable.tag_links).not_to be_empty
expect(job.taggings).not_to be_empty
expect(other_job.taggings).not_to be_empty
expect(tagged_with('tag1')).to contain_exactly(taggable)
expect(tagged_with('tag2')).to contain_exactly(taggable, other_taggable)
expect(tagged_with('tag3')).to contain_exactly(other_taggable)
expect(Ci::Build.tagged_with('tag1')).to contain_exactly(job)
expect(Ci::Build.tagged_with('tag2')).to contain_exactly(job, other_job)
expect(Ci::Build.tagged_with('tag3')).to contain_exactly(other_job)
expect(taggable.taggings).not_to be_empty
expect(other_taggable.taggings).not_to be_empty
expect(taggable_class.tagged_with('tag1')).to contain_exactly(taggable)
expect(taggable_class.tagged_with('tag2')).to contain_exactly(taggable, other_taggable)
expect(taggable_class.tagged_with('tag3')).to contain_exactly(other_taggable)
end
end
end
context 'when writing only to link table' do
let(:monomorphic_taggings) { true }
let(:polymorphic_taggings) { false }
context 'when writing only to taggings' do
let(:monomorphic_taggings) { false }
let(:polymorphic_taggings) { true }
it 'persists tag links' do
service.insert!
it 'persists taggings' do
service.insert!
expect(job.tag_links).not_to be_empty
expect(other_job.tag_links).not_to be_empty
expect(taggable.tag_links).to be_empty
expect(other_taggable.tag_links).to be_empty
expect(jobs_tagged_with('tag1')).to contain_exactly(job)
expect(jobs_tagged_with('tag2')).to contain_exactly(job, other_job)
expect(jobs_tagged_with('tag3')).to contain_exactly(other_job)
expect(taggable.taggings).not_to be_empty
expect(other_taggable.taggings).not_to be_empty
expect(job.taggings).to be_empty
expect(other_job.taggings).to be_empty
expect(taggable_class.tagged_with('tag1')).to contain_exactly(taggable)
expect(taggable_class.tagged_with('tag2')).to contain_exactly(taggable, other_taggable)
expect(taggable_class.tagged_with('tag3')).to contain_exactly(other_taggable)
end
end
end
def jobs_tagged_with(tag)
scope = Ci::BuildTag
.where(tag_id: Ci::Tag.where(name: tag))
.where(Ci::BuildTag.arel_table[:build_id].eq(Ci::Build.arel_table[:id]))
.where(Ci::BuildTag.arel_table[:partition_id].eq(Ci::Build.arel_table[:partition_id]))
context 'when writing only to link table' do
let(:monomorphic_taggings) { true }
let(:polymorphic_taggings) { false }
Ci::Build.where_exists(scope)
it 'persists tag links' do
service.insert!
expect(taggable.tag_links).not_to be_empty
expect(other_taggable.tag_links).not_to be_empty
expect(tagged_with('tag1')).to contain_exactly(taggable)
expect(tagged_with('tag2')).to contain_exactly(taggable, other_taggable)
expect(tagged_with('tag3')).to contain_exactly(other_taggable)
expect(taggable.taggings).to be_empty
expect(other_taggable.taggings).to be_empty
end
end
def tagged_with(tag)
scope = tagging_class
.where(tag_id: Ci::Tag.where(name: tag))
.where(tagging_class.arel_table[taggable_id_column].eq(taggable_class.arel_table[:id]))
.where(tagging_class.arel_table[partition_column].eq(taggable_class.arel_table[partition_column]))
taggable_class.where_exists(scope)
end
end
end
end
end
context 'with tags for only one job' do
before do
job.tag_list = %w[tag1 tag2]
end
context 'with tags for only one taggable' do
before do
taggable.tag_list = %w[tag1 tag2]
end
it 'persists tags' do
expect(service.insert!).to be_truthy
it 'persists tags' do
expect(service.insert!).to be_truthy
expect(job.reload.tag_list).to match_array(%w[tag1 tag2])
expect(other_job.reload.tag_list).to be_empty
end
expect(taggable.reload.tag_list).to match_array(%w[tag1 tag2])
expect(other_taggable.reload.tag_list).to be_empty
end
it 'persists taggings' do
service.insert!
it 'persists taggings' do
service.insert!
expect(job.taggings.size).to eq(2)
expect(taggable.taggings.size).to eq(2)
expect(Ci::Build.tagged_with('tag1')).to include(job)
expect(Ci::Build.tagged_with('tag2')).to include(job)
expect(taggable_class.tagged_with('tag1')).to include(taggable)
expect(taggable_class.tagged_with('tag2')).to include(taggable)
end
end
end
end

Some files were not shown because too many files have changed in this diff Show More