Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-09-24 18:19:20 +00:00
parent 028bf690c4
commit d68c057b16
92 changed files with 1797 additions and 327 deletions

View File

@ -1436,7 +1436,6 @@ lib/gitlab/checks/**
/app/uploaders/job_artifact_uploader.rb
/app/validators/json_schemas/build_metadata_id_tokens.json
/app/workers/build_queue_worker.rb
/app/workers/ci_platform_metrics_update_cron_worker.rb
/app/workers/create_pipeline_worker.rb
/app/workers/expire_build_artifacts_worker.rb
/app/workers/pipeline_hooks_worker.rb

View File

@ -1966,7 +1966,6 @@ Gitlab/BoundedContexts:
- 'app/workers/chaos/leak_mem_worker.rb'
- 'app/workers/chaos/sleep_worker.rb'
- 'app/workers/chat_notification_worker.rb'
- 'app/workers/ci_platform_metrics_update_cron_worker.rb'
- 'app/workers/cleanup_container_repository_worker.rb'
- 'app/workers/click_house/audit_event_partition_sync_worker.rb'
- 'app/workers/click_house/audit_events_sync_worker.rb'

View File

@ -705,7 +705,6 @@ Gitlab/NamespacedClass:
- 'app/workers/build_queue_worker.rb'
- 'app/workers/bulk_import_worker.rb'
- 'app/workers/chat_notification_worker.rb'
- 'app/workers/ci_platform_metrics_update_cron_worker.rb'
- 'app/workers/cleanup_container_repository_worker.rb'
- 'app/workers/cluster_configure_istio_worker.rb'
- 'app/workers/cluster_install_app_worker.rb'

View File

@ -991,7 +991,6 @@ Layout/LineLength:
- 'ee/app/workers/audit_events/audit_event_streaming_worker.rb'
- 'ee/app/workers/concerns/elastic/migration_backfill_helper.rb'
- 'ee/app/workers/concerns/elastic/migration_obsolete.rb'
- 'ee/app/workers/elastic/migration_worker.rb'
- 'ee/app/workers/elastic_delete_project_worker.rb'
- 'ee/app/workers/elastic_namespace_rollout_worker.rb'
- 'ee/app/workers/geo/destroy_worker.rb'
@ -2136,7 +2135,6 @@ Layout/LineLength:
- 'ee/spec/workers/ci/minutes/update_project_and_namespace_usage_worker_spec.rb'
- 'ee/spec/workers/ci/upstream_projects_subscriptions_cleanup_worker_spec.rb'
- 'ee/spec/workers/compliance_management/merge_requests/compliance_violations_worker_spec.rb'
- 'ee/spec/workers/elastic/migration_worker_spec.rb'
- 'ee/spec/workers/geo/destroy_worker_spec.rb'
- 'ee/spec/workers/geo/prune_event_log_worker_spec.rb'
- 'ee/spec/workers/geo/secondary/registry_consistency_worker_spec.rb'

View File

@ -828,7 +828,6 @@ RSpec/ContextWording:
- 'ee/spec/workers/ci/runners/stale_group_runners_prune_cron_worker_spec.rb'
- 'ee/spec/workers/ci/upstream_projects_subscriptions_cleanup_worker_spec.rb'
- 'ee/spec/workers/ee/repository_check/batch_worker_spec.rb'
- 'ee/spec/workers/elastic/migration_worker_spec.rb'
- 'ee/spec/workers/elastic_index_bulk_cron_worker_spec.rb'
- 'ee/spec/workers/elastic_indexing_control_worker_spec.rb'
- 'ee/spec/workers/geo/prune_event_log_worker_spec.rb'

View File

@ -1112,7 +1112,6 @@ RSpec/NamedSubject:
- 'ee/spec/workers/ee/issuable/related_links_create_worker_spec.rb'
- 'ee/spec/workers/ee/issuable_export_csv_worker_spec.rb'
- 'ee/spec/workers/ee/repository_check/batch_worker_spec.rb'
- 'ee/spec/workers/elastic/migration_worker_spec.rb'
- 'ee/spec/workers/elastic/namespace_update_worker_spec.rb'
- 'ee/spec/workers/elastic_full_index_worker_spec.rb'
- 'ee/spec/workers/elastic_indexing_control_worker_spec.rb'

View File

@ -52,7 +52,6 @@ SidekiqLoadBalancing/WorkerDataConsistency:
- 'app/workers/ci/stuck_builds/drop_running_worker.rb'
- 'app/workers/ci/stuck_builds/drop_scheduled_worker.rb'
- 'app/workers/ci/test_failure_history_worker.rb'
- 'app/workers/ci_platform_metrics_update_cron_worker.rb'
- 'app/workers/cleanup_container_repository_worker.rb'
- 'app/workers/cluster_configure_istio_worker.rb'
- 'app/workers/cluster_install_app_worker.rb'
@ -317,7 +316,6 @@ SidekiqLoadBalancing/WorkerDataConsistency:
- 'ee/app/workers/dependencies/export_worker.rb'
- 'ee/app/workers/deployments/auto_rollback_worker.rb'
- 'ee/app/workers/dora/daily_metrics/refresh_worker.rb'
- 'ee/app/workers/elastic/migration_worker.rb'
- 'ee/app/workers/elastic_association_indexer_worker.rb'
- 'ee/app/workers/elastic_cluster_reindexing_cron_worker.rb'
- 'ee/app/workers/elastic_commit_indexer_worker.rb'

View File

@ -320,7 +320,6 @@ Style/GuardClause:
- 'ee/app/validators/user_existence_validator.rb'
- 'ee/app/workers/ee/ci/build_finished_worker.rb'
- 'ee/app/workers/ee/post_receive.rb'
- 'ee/app/workers/elastic/migration_worker.rb'
- 'ee/app/workers/elastic_namespace_rollout_worker.rb'
- 'ee/app/workers/epics/new_epic_issue_worker.rb'
- 'ee/app/workers/geo/scheduler/scheduler_worker.rb'

View File

@ -494,7 +494,6 @@ Style/IfUnlessModifier:
- 'ee/spec/support/helpers/feature_approval_helper.rb'
- 'ee/spec/support/helpers/search_results_helpers.rb'
- 'ee/spec/support/http_io/http_io_helpers.rb'
- 'ee/spec/workers/elastic/migration_worker_spec.rb'
- 'lib/api/api_guard.rb'
- 'lib/api/boards_responses.rb'
- 'lib/api/branches.rb'

View File

@ -796,7 +796,6 @@ Style/InlineDisableAnnotation:
- 'app/workers/ci/track_failed_build_worker.rb'
- 'app/workers/ci/unlock_pipelines_in_queue_worker.rb'
- 'app/workers/ci/update_locked_unknown_artifacts_worker.rb'
- 'app/workers/ci_platform_metrics_update_cron_worker.rb'
- 'app/workers/cluster_configure_istio_worker.rb'
- 'app/workers/cluster_install_app_worker.rb'
- 'app/workers/cluster_patch_app_worker.rb'

View File

@ -275,7 +275,7 @@ gem 'rack', '~> 2.2.9' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'rack-timeout', '~> 0.7.0', require: 'rack/timeout/base' # rubocop:todo Gemfile/MissingFeatureCategory
group :puma do
gem 'puma', '= 6.4.0', require: false, feature_category: :shared
gem 'puma', '= 6.4.3', require: false, feature_category: :shared
gem 'sd_notify', '~> 0.1.0', require: false # rubocop:todo Gemfile/MissingFeatureCategory
end

View File

@ -525,8 +525,8 @@
{"name":"pry-rails","version":"0.3.9","platform":"ruby","checksum":"468662575abb6b67f4a9831219f99290d5eae7bf186e64dd810d0a3e4a8cc4b1"},
{"name":"pry-shell","version":"0.6.4","platform":"ruby","checksum":"ad024882d29912b071a7de65ebea538b242d2dc1498c60c7c2352ef94769f208"},
{"name":"public_suffix","version":"6.0.1","platform":"ruby","checksum":"61d44e1cab5cbbbe5b31068481cf16976dd0dc1b6b07bd95617ef8c5e3e00c6f"},
{"name":"puma","version":"6.4.0","platform":"java","checksum":"eb27679e9e665882bab85dfa84704b0615b4f77cec46de014f05b90a5ab36cfe"},
{"name":"puma","version":"6.4.0","platform":"ruby","checksum":"d5dda11362744df9f4694708a62e3cfddf72eba7498c16016ebbb30f106712f9"},
{"name":"puma","version":"6.4.3","platform":"java","checksum":"373fcfacacaafd0f5a24db18cb99b3f2decb5c5316470169852559aa80adc8ab"},
{"name":"puma","version":"6.4.3","platform":"ruby","checksum":"24a4645c006811d83f2480057d1f54a96e7627b6b90e1c99b260b9dc630eb43e"},
{"name":"pyu-ruby-sasl","version":"0.0.3.3","platform":"ruby","checksum":"5683a6bc5738db5a1bf5ceddeaf545405fb241b4184dd4f2587e679a7e9497e5"},
{"name":"raabro","version":"1.4.0","platform":"ruby","checksum":"d4fa9ff5172391edb92b242eed8be802d1934b1464061ae5e70d80962c5da882"},
{"name":"racc","version":"1.6.2","platform":"java","checksum":"0880781e7dfde09e665d0b6160b583e01ed52fcc2955d7891447d33c2d1d2cf1"},

View File

@ -1451,7 +1451,7 @@ GEM
tty-markdown
tty-prompt
public_suffix (6.0.1)
puma (6.4.0)
puma (6.4.3)
nio4r (~> 2.0)
pyu-ruby-sasl (0.0.3.3)
raabro (1.4.0)
@ -2232,7 +2232,7 @@ DEPENDENCIES
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.6.4)
puma (= 6.4.0)
puma (= 6.4.3)
rack (~> 2.2.9)
rack-attack (~> 6.7.0)
rack-cors (~> 2.0.1)

View File

@ -535,8 +535,8 @@
{"name":"psych","version":"5.1.2","platform":"java","checksum":"1dd68dc609eddbc884e6892e11da942e16f7256bd30ebde9d35449d43043a6fe"},
{"name":"psych","version":"5.1.2","platform":"ruby","checksum":"337322f58fc2bf24827d2b9bd5ab595f6a72971867d151bb39980060ea40a368"},
{"name":"public_suffix","version":"6.0.1","platform":"ruby","checksum":"61d44e1cab5cbbbe5b31068481cf16976dd0dc1b6b07bd95617ef8c5e3e00c6f"},
{"name":"puma","version":"6.4.0","platform":"java","checksum":"eb27679e9e665882bab85dfa84704b0615b4f77cec46de014f05b90a5ab36cfe"},
{"name":"puma","version":"6.4.0","platform":"ruby","checksum":"d5dda11362744df9f4694708a62e3cfddf72eba7498c16016ebbb30f106712f9"},
{"name":"puma","version":"6.4.3","platform":"java","checksum":"373fcfacacaafd0f5a24db18cb99b3f2decb5c5316470169852559aa80adc8ab"},
{"name":"puma","version":"6.4.3","platform":"ruby","checksum":"24a4645c006811d83f2480057d1f54a96e7627b6b90e1c99b260b9dc630eb43e"},
{"name":"pyu-ruby-sasl","version":"0.0.3.3","platform":"ruby","checksum":"5683a6bc5738db5a1bf5ceddeaf545405fb241b4184dd4f2587e679a7e9497e5"},
{"name":"raabro","version":"1.4.0","platform":"ruby","checksum":"d4fa9ff5172391edb92b242eed8be802d1934b1464061ae5e70d80962c5da882"},
{"name":"racc","version":"1.6.2","platform":"java","checksum":"0880781e7dfde09e665d0b6160b583e01ed52fcc2955d7891447d33c2d1d2cf1"},

View File

@ -1456,7 +1456,7 @@ GEM
psych (5.1.2)
stringio
public_suffix (6.0.1)
puma (6.4.0)
puma (6.4.3)
nio4r (~> 2.0)
pyu-ruby-sasl (0.0.3.3)
raabro (1.4.0)
@ -2247,7 +2247,7 @@ DEPENDENCIES
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.6.4)
puma (= 6.4.0)
puma (= 6.4.3)
rack (~> 2.2.9)
rack-attack (~> 6.7.0)
rack-cors (~> 2.0.1)

View File

@ -9,6 +9,7 @@ export const BRANCH_REF_TYPE = 'heads';
export const TAG_REF_TYPE = 'tags';
export const TAG_REF_TYPE_ICON = 'tag';
export const BRANCH_REF_TYPE_ICON = 'branch';
export const SEARCH_ICON = 'search';
export const REF_TYPE_PARAM_NAME = 'ref_type';
export const X_TOTAL_HEADER = 'x-total';

View File

@ -1,9 +1,10 @@
<script>
// eslint-disable-next-line no-restricted-imports
import { mapGetters } from 'vuex';
import { mapGetters, mapState } from 'vuex';
import StatusFilter from './status_filter/index.vue';
import FiltersTemplate from './filters_template.vue';
import ArchivedFilter from './archived_filter/index.vue';
import SourceBranchFilter from './source_branch_filter/index.vue';
export default {
name: 'MergeRequestsFilters',
@ -11,9 +12,15 @@ export default {
StatusFilter,
FiltersTemplate,
ArchivedFilter,
SourceBranchFilter,
},
computed: {
...mapGetters(['hasProjectContext']),
...mapState(['groupInitialJson']),
shouldShowSourceBranchFilter() {
// this will be changed https://gitlab.com/gitlab-org/gitlab/-/issues/480740
return !this.hasProjectContext || this.groupInitialJson?.id;
},
},
};
</script>
@ -22,5 +29,6 @@ export default {
<filters-template>
<status-filter class="gl-mb-5" />
<archived-filter v-if="hasProjectContext" class="gl-mb-5" />
<source-branch-filter v-if="shouldShowSourceBranchFilter" class="gl-mb-5" />
</filters-template>
</template>

View File

@ -0,0 +1,143 @@
<script>
import { GlIcon, GlCollapsibleListbox } from '@gitlab/ui';
import { debounce } from 'lodash';
import { s__ } from '~/locale';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { SEARCH_ICON } from '../../constants';

// Generic branch picker built on GlCollapsibleListbox. The parent supplies
// the full list of branches via `sourceBranches`; typing in the search box
// filters that list client-side (debounced). Selection, reset and hide are
// re-emitted to the parent.
export default {
  name: 'BranchDropdown',
  components: {
    GlIcon,
    GlCollapsibleListbox,
  },
  props: {
    // Listbox items of shape { text, value } to choose from.
    sourceBranches: {
      type: Array,
      required: true,
    },
    // Error messages rendered in the dropdown footer; also drives the
    // red toggle styling via `hasError`.
    errors: {
      type: Array,
      required: false,
      default: () => [],
    },
    headerText: {
      type: String,
      required: true,
    },
    // Placeholder for the search box and the toggle text.
    searchBranchText: {
      type: String,
      required: true,
    },
    selectedBranch: {
      type: String,
      required: false,
      default: '',
    },
    icon: {
      type: String,
      required: false,
      default: SEARCH_ICON,
    },
    isLoading: {
      type: Boolean,
      required: false,
      default: false,
    },
  },
  i18n: {
    noSearchResultsText: s__('GlobalSearch|No matching results'),
    noLoadResultsText: s__('GlobalSearch|No results found'),
  },
  data() {
    return {
      query: '',
      // FIX: must be declared here so Vue tracks it reactively. It was
      // previously only assigned inside `search()`, which means
      // `dropdownItems` never re-computed when search results changed.
      searchResults: [],
    };
  },
  computed: {
    // FIX: `hasError` was referenced in `extendedToggleButtonClass` but
    // never defined, so the error styling could never activate.
    hasError() {
      return this.errors.length > 0;
    },
    extendedToggleButtonClass() {
      return [
        {
          '!gl-shadow-inner-1-red-500': this.hasError,
          'gl-font-monospace': Boolean(this.selectedBranch),
        },
        'gl-mb-0',
      ];
    },
    isSearching() {
      return this.query.length > 0;
    },
    dropdownItems() {
      return this.isSearching ? this.searchResults : this.sourceBranches;
    },
    noResultsText() {
      return this.isSearching
        ? this.$options.i18n.noSearchResultsText
        : this.$options.i18n.noLoadResultsText;
    },
  },
  created() {
    // Debounced so we only filter once the user pauses typing.
    this.debouncedSearch = debounce(this.search, DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
  },
  methods: {
    onSearchBoxInput(searchQuery = '') {
      this.query = searchQuery?.trim();
      this.debouncedSearch();
    },
    // Simple client-side substring filter over the provided branches.
    search() {
      if (!this.query) {
        this.searchResults = [];
        return;
      }
      this.searchResults = this.sourceBranches.filter((branch) => branch.text.includes(this.query));
    },
    selectRef(ref) {
      this.$emit('selected', ref);
    },
  },
};
</script>
<template>
  <div>
    <gl-collapsible-listbox
      class="ref-selector gl-w-full"
      block
      searchable
      resetable
      :selected="selectedBranch"
      :header-text="headerText"
      :items="dropdownItems"
      :no-results-text="noResultsText"
      :searching="isLoading"
      :search-placeholder="searchBranchText"
      :toggle-class="extendedToggleButtonClass"
      :toggle-text="searchBranchText"
      :icon="icon"
      :loading="isLoading"
      :reset-button-label="s__('GlobalSearch|Reset')"
      v-bind="$attrs"
      v-on="$listeners"
      @hidden="$emit('hide')"
      @search="onSearchBoxInput"
      @select="selectRef"
      @reset="$emit('reset')"
    >
      <template #list-item="{ item }">
        {{ item.text }}
      </template>
      <template #footer>
        <div
          v-for="errorMessage in errors"
          :key="errorMessage"
          data-testid="branch-dropdown-error-list"
          class="gl-mx-4 gl-my-3 gl-flex gl-items-start gl-text-red-500"
        >
          <gl-icon name="error" class="gl-mr-2 gl-mt-2 gl-shrink-0" />
          <span>{{ errorMessage }}</span>
        </div>
      </template>
    </gl-collapsible-listbox>
  </div>
</template>

View File

@ -0,0 +1,146 @@
<script>
// eslint-disable-next-line no-restricted-imports
import { mapActions, mapState } from 'vuex';
import { GlFormCheckbox, GlTooltipDirective } from '@gitlab/ui';
import { s__ } from '~/locale';
import AjaxCache from '~/lib/utils/ajax_cache';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import { InternalEvents } from '~/tracking';
import BranchDropdown from '~/search/sidebar/components/shared/branch_dropdown.vue';
import { BRANCH_REF_TYPE_ICON } from '~/ref/constants';
import {
  SEARCH_ICON,
  EVENT_SELECT_SOURCE_BRANCH_FILTER_ON_MERGE_REQUEST_PAGE,
} from '../../constants';

const trackingMixin = InternalEvents.mixin();

// Query params written into the search store: one for "source branch is X",
// one for the negated "source branch is not X" variant. Exported because the
// store's SIDEBAR_PARAMS list imports them.
export const SOURCE_BRANCH_PARAM = 'source_branch';
export const NOT_SOURCE_BRANCH_PARAM = 'not[source_branch]';
export const SOURCE_BRANCH_ENDPOINT_PATH = '/-/autocomplete/merge_request_source_branches.json';

// Sidebar filter for merge-request search: pick a source branch and
// optionally negate the match via the "Branch not included" checkbox.
export default {
  name: 'SourceBranchFilter',
  components: {
    BranchDropdown,
    GlFormCheckbox,
  },
  directives: {
    GlTooltip: GlTooltipDirective,
  },
  mixins: [trackingMixin],
  data() {
    return {
      sourceBranches: [],
      errors: [],
      // Checkbox state: true means the query excludes the selected branch.
      toggleState: false,
      selectedBranch: '',
      isLoading: false,
    };
  },
  i18n: {
    toggleTooltip: s__('GlobalSearch|Toggle if results have source branch included or excluded'),
  },
  computed: {
    ...mapState(['groupInitialJson', 'projectInitialJson', 'query']),
    // Show the selected branch on the toggle, or a generic "Search" prompt.
    showDropdownPlaceholderText() {
      return !this.selectedBranch ? s__('GlobalSearch|Search') : this.selectedBranch;
    },
    showDropdownPlaceholderIcon() {
      return !this.selectedBranch ? SEARCH_ICON : BRANCH_REF_TYPE_ICON;
    },
  },
  mounted() {
    // Restore UI state from the current search query (either the include or
    // the exclude param may carry the branch name).
    this.selectedBranch =
      this.query?.[SOURCE_BRANCH_PARAM] || this.query?.[NOT_SOURCE_BRANCH_PARAM];
    this.toggleState = Boolean(this.query?.[NOT_SOURCE_BRANCH_PARAM]);
  },
  methods: {
    ...mapActions(['setQuery', 'applyQuery']),
    // Autocomplete endpoint scoped to the current group/project (whichever
    // the store has); gon.relative_url_root handles relative-URL installs.
    getMergeRequestSourceBranchesEndpoint() {
      const endpoint = `${gon.relative_url_root || ''}${SOURCE_BRANCH_ENDPOINT_PATH}`;
      const params = {
        group_id: this.groupInitialJson?.id || null,
        project_id: this.projectInitialJson?.id || null,
      };
      return mergeUrlParams(params, endpoint);
    },
    // Map API payload to GlCollapsibleListbox items ({ text, value }).
    convertToListboxItems(data) {
      return data.map((item) => ({
        text: item.title,
        value: item.title,
      }));
    },
    // Fetched lazily on dropdown @shown; AjaxCache dedupes repeated opens.
    async getCachedSourceBranches() {
      this.isLoading = true;
      try {
        const data = await AjaxCache.retrieve(this.getMergeRequestSourceBranchesEndpoint());
        this.errors = [];
        this.isLoading = false;
        this.sourceBranches = this.convertToListboxItems(data);
      } catch (e) {
        this.isLoading = false;
        this.errors.push(e.message);
      }
    },
    // Writes the branch into exactly one of the two params (clearing the
    // other) depending on the checkbox, and tracks which variant was used.
    handleSelected(ref) {
      this.selectedBranch = ref;

      if (this.toggleState) {
        this.setQuery({ key: SOURCE_BRANCH_PARAM, value: '' });
        this.setQuery({ key: NOT_SOURCE_BRANCH_PARAM, value: ref });
        this.trackEvent(EVENT_SELECT_SOURCE_BRANCH_FILTER_ON_MERGE_REQUEST_PAGE, {
          label: 'exclude',
        });
        return;
      }

      this.setQuery({ key: SOURCE_BRANCH_PARAM, value: ref });
      this.setQuery({ key: NOT_SOURCE_BRANCH_PARAM, value: '' });
      this.trackEvent(EVENT_SELECT_SOURCE_BRANCH_FILTER_ON_MERGE_REQUEST_PAGE, {
        label: 'include',
      });
    },
    // Re-apply the current selection under the flipped include/exclude mode.
    changeCheckboxInput(state) {
      this.toggleState = state;
      this.handleSelected(this.selectedBranch);
    },
    handleReset() {
      this.toggleState = false;
      this.setQuery({ key: SOURCE_BRANCH_PARAM, value: '' });
      this.setQuery({ key: NOT_SOURCE_BRANCH_PARAM, value: '' });
      this.applyQuery();
    },
  },
  SOURCE_BRANCH_PARAM,
};
</script>
<template>
  <div class="gl-relative gl-pb-0 md:gl-pt-0">
    <div class="gl-mb-2 gl-text-sm gl-font-bold" data-testid="source-branch-filter-title">
      {{ s__('GlobalSearch|Source branch') }}
    </div>
    <branch-dropdown
      :source-branches="sourceBranches"
      :errors="errors"
      :header-text="s__('GlobalSearch|Source branch')"
      :search-branch-text="showDropdownPlaceholderText"
      :selected-branch="selectedBranch"
      :icon="showDropdownPlaceholderIcon"
      :is-loading="isLoading"
      @selected="handleSelected"
      @shown="getCachedSourceBranches"
      @reset="handleReset"
    />
    <gl-form-checkbox
      v-model="toggleState"
      class="gl-inline-flex gl-w-full gl-grow gl-justify-between"
      @input="changeCheckboxInput"
    >
      <span v-gl-tooltip="$options.i18n.toggleTooltip" data-testid="branch">
        {{ s__('GlobalSearch|Branch not included') }}
      </span>
    </gl-form-checkbox>
  </div>
</template>

View File

@ -28,6 +28,8 @@ export const SEARCH_TYPE_BASIC = 'basic';
export const SEARCH_TYPE_ADVANCED = 'advanced';
export const SEARCH_TYPE_ZOEKT = 'zoekt';
export const SEARCH_ICON = 'search';
export const ANY_OPTION = {
id: null,
name: __('Any'),
@ -50,3 +52,8 @@ export const PROJECT_DATA = {
export const EVENT_CLICK_ZOEKT_INCLUDE_FORKS_ON_SEARCH_RESULTS_PAGE =
'click_zoekt_include_forks_on_search_results_page';
export const EVENT_SELECT_SOURCE_BRANCH_FILTER = 'select_source_branch_filter';
export const EVENT_SELECT_SOURCE_BRANCH_FILTER_ON_MERGE_REQUEST_PAGE =
'select_source_branch_filter_on_merge_request_page';

View File

@ -1,3 +1,4 @@
import { omitBy } from 'lodash';
import Api from '~/api';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
@ -124,7 +125,8 @@ export const setQuery = ({ state, commit, getters }, { key, value }) => {
};
export const applyQuery = ({ state }) => {
visitUrl(setUrlParams({ ...state.query, page: null }, window.location.href, false, true));
const query = omitBy(state.query, (item) => item === '');
visitUrl(setUrlParams({ ...query, page: null }, window.location.href, true, true));
};
export const resetQuery = ({ state }) => {

View File

@ -5,6 +5,10 @@ import { LABEL_FILTER_PARAM } from '~/search/sidebar/components/label_filter/dat
import { archivedFilterData } from '~/search/sidebar/components/archived_filter/data';
import { INCLUDE_FORKED_FILTER_PARAM } from '~/search/sidebar/components/forks_filter/index.vue';
import { s__ } from '~/locale';
import {
SOURCE_BRANCH_PARAM,
NOT_SOURCE_BRANCH_PARAM,
} from '~/search/sidebar/components/source_branch_filter/index.vue';
export const MAX_FREQUENT_ITEMS = 5;
@ -21,6 +25,8 @@ export const SIDEBAR_PARAMS = [
LABEL_FILTER_PARAM,
archivedFilterData.filterParam,
INCLUDE_FORKED_FILTER_PARAM,
SOURCE_BRANCH_PARAM,
NOT_SOURCE_BRANCH_PARAM,
];
export const REGEX_PARAM = 'regex';

View File

@ -321,6 +321,10 @@
.description {
line-height: 1.5;
max-height: $gl-spacing-scale-8;
a {
word-break: break-all;
}
}
}

View File

@ -301,7 +301,7 @@ class Commit
end
def lazy_author
BatchLoader.for(author_email.downcase).batch do |emails, loader|
BatchLoader.for(author_email&.downcase).batch do |emails, loader|
users = User.by_any_email(emails, confirmed: true).includes(:emails)
emails.each do |email|
@ -317,7 +317,7 @@ class Commit
lazy_author&.itself
end
end
request_cache(:author) { author_email.downcase }
request_cache(:author) { author_email&.downcase }
def committer(confirmed: true)
@committer ||= User.find_by_any_email(committer_email, confirmed: confirmed)

View File

@ -9,6 +9,8 @@ module Packages
has_many :conan_recipe_revisions, inverse_of: :package, class_name: 'Packages::Conan::RecipeRevision'
has_many :conan_package_references, inverse_of: :package, class_name: 'Packages::Conan::PackageReference'
accepts_nested_attributes_for :conan_metadatum
delegate :recipe, :recipe_path, to: :conan_metadatum, prefix: :conan

View File

@ -0,0 +1,39 @@
# frozen_string_literal: true

module Packages
  module Conan
    # Model for a Conan package reference: the identifier of a binary package
    # built from a recipe, stored as a SHA via `sha_attribute` and optionally
    # tied to a specific recipe revision.
    class PackageReference < ApplicationRecord
      include ShaAttribute

      # Byte limit for the `reference` column (20 bytes — presumably a SHA-1
      # digest, matching `sha_attribute :reference`; confirm against migration).
      REFERENCE_LENGTH_MAX = 20
      # Upper bound on the serialized conaninfo payload kept in `info`.
      MAX_INFO_SIZE = 20_000

      sha_attribute :reference

      belongs_to :package, class_name: 'Packages::Conan::Package',
        inverse_of: :conan_package_references
      # Optional: no presence validation on recipe_revision below.
      belongs_to :recipe_revision, class_name: 'Packages::Conan::RecipeRevision',
        inverse_of: :conan_package_references
      belongs_to :project

      validates :package, :project, presence: true
      # A reference must be unique per package + recipe revision.
      validates :reference, presence: true, bytesize: { maximum: -> { REFERENCE_LENGTH_MAX } },
        uniqueness: { scope: [:package_id, :recipe_revision_id] }
      # Structure checked against app/validators/json_schemas/conan_package_info.json.
      validates :info, json_schema: { filename: 'conan_package_info', detail_errors: true }
      validate :ensure_info_size

      private

      # Rejects oversized conaninfo payloads with a translated message; size is
      # measured on the string form of `info`.
      def ensure_info_size
        return if info.to_s.size <= MAX_INFO_SIZE

        errors.add(:info, :too_large,
          message: format(
            _('conaninfo is too large. Maximum size is %{max_size} characters'),
            max_size: MAX_INFO_SIZE
          )
        )
      end
    end
  end
end

View File

@ -12,6 +12,9 @@ module Packages
belongs_to :package, class_name: 'Packages::Conan::Package', inverse_of: :conan_recipe_revisions
belongs_to :project
has_many :conan_package_references, inverse_of: :recipe_revision,
class_name: 'Packages::Conan::PackageReference'
validates :package, :project, presence: true
validates :revision, presence: true, bytesize: { maximum: -> { REVISION_LENGTH_MAX } },
uniqueness: { scope: :package_id }

View File

@ -805,6 +805,8 @@ class User < ApplicationRecord
# @param emails [String, Array<String>] email addresses to check
# @param confirmed [Boolean] Only return users where the primary email is confirmed
def by_any_email(emails, confirmed: false)
return none if Array(emails).all?(&:nil?)
from_users = by_user_email(emails)
from_users = from_users.confirmed if confirmed

View File

@ -10,6 +10,9 @@ module VirtualRegistries
belongs_to :group
belongs_to :upstream, class_name: 'VirtualRegistries::Packages::Maven::Upstream', inverse_of: :cached_responses
# Used in destroying stale cached responses in DestroyOrphanCachedResponsesWorker
enum :status, default: 0, processing: 1, error: 3
validates :group, top_level_group: true, presence: true
validates :relative_path,
:object_storage_key,
@ -35,6 +38,12 @@ module VirtualRegistries
scope :search_by_relative_path, ->(query) do
fuzzy_search(query, [:relative_path], use_minimum_char_limit: false)
end
scope :orphan, -> { where(upstream: nil) }
scope :pending_destruction, -> { orphan.default }
def self.next_pending_destruction
pending_destruction.lock('FOR UPDATE SKIP LOCKED').take
end
# create or update a cached response identified by the upstream, group_id and relative_path
# Given that we have chances that this function is not executed in isolation, we can't use

View File

@ -0,0 +1,33 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "Conan Info Schema",
"oneOf": [
{
"type": "object",
"additionalProperties": false
},
{
"type": "object",
"properties": {
"settings": {
"type": "object"
},
"requires": {
"type": "array",
"items": {
"type": "string"
}
},
"options": {
"type": "object"
}
},
"required": [
"settings",
"requires",
"options"
],
"additionalProperties": true
}
]
}

View File

@ -33,7 +33,6 @@
= render 'groups/settings/lfs', f: f
= render_if_exists 'groups/settings/auto_assign_duo_pro', f: f, group: @group
= render_if_exists 'groups/settings/duo_features_enabled', f: f, group: @group
= render_if_exists 'groups/settings/experimental_settings', f: f, group: @group
= render_if_exists 'groups/settings/product_analytics_settings', f: f, group: @group
= render 'groups/settings/git_access_protocols', f: f, group: @group
= render 'groups/settings/project_creation_level', f: f, group: @group

View File

@ -300,15 +300,6 @@
:weight: 1
:idempotent: true
:tags: []
- :name: cronjob:ci_platform_metrics_update_cron
:worker_name: CiPlatformMetricsUpdateCronWorker
:feature_category: :continuous_integration
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :cpu
:weight: 1
:idempotent: false
:tags: []
- :name: cronjob:ci_runners_reconcile_existing_runner_versions_cron
:worker_name: Ci::Runners::ReconcileExistingRunnerVersionsCronWorker
:feature_category: :fleet_visibility
@ -1038,6 +1029,15 @@
:weight: 1
:idempotent: true
:tags: []
- :name: dependency_proxy_blob:virtual_registries_packages_destroy_orphan_cached_responses
:worker_name: VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker
:feature_category: :virtual_registry
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: dependency_proxy_manifest:dependency_proxy_cleanup_manifest
:worker_name: DependencyProxy::CleanupManifestWorker
:feature_category: :virtual_registry

View File

@ -1,18 +0,0 @@
# frozen_string_literal: true
class CiPlatformMetricsUpdateCronWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
data_consistency :always
# This worker does not perform work scoped to a context
include CronjobQueue # rubocop:disable Scalability/CronWorkerContext
feature_category :continuous_integration
urgency :low
worker_resource_boundary :cpu
def perform
# no-op
end
end

View File

@ -13,6 +13,7 @@ module DependencyProxy
def perform
enqueue_blob_cleanup_job if DependencyProxy::Blob.pending_destruction.any?
enqueue_manifest_cleanup_job if DependencyProxy::Manifest.pending_destruction.any?
enqueue_vreg_packages_cached_response_cleanup_job
end
private
@ -24,5 +25,13 @@ module DependencyProxy
def enqueue_manifest_cleanup_job
DependencyProxy::CleanupManifestWorker.perform_with_capacity
end
def enqueue_vreg_packages_cached_response_cleanup_job
[::VirtualRegistries::Packages::Maven::CachedResponse].each do |klass|
if klass.pending_destruction.any?
::VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker.perform_with_capacity(klass.name)
end
end
end
end
end

View File

@ -0,0 +1,72 @@
# frozen_string_literal: true

module VirtualRegistries
  module Packages
    # Limited-capacity worker that destroys cached responses pending
    # destruction (orphans), one record per job, across the model classes
    # passed in by the cleanup cron (e.g. Maven::CachedResponse).
    class DestroyOrphanCachedResponsesWorker
      include ApplicationWorker
      include LimitedCapacity::Worker

      # At most this many jobs run concurrently (see #max_running_jobs).
      MAX_CAPACITY = 2

      data_consistency :sticky
      urgency :low
      idempotent!
      queue_namespace :dependency_proxy_blob
      feature_category :virtual_registry

      # @param model [String] fully-qualified class name to clean up;
      #   constantized here, so callers must pass a trusted value.
      def perform_work(model)
        next_item = next_item(model.constantize)
        return unless next_item

        next_item.destroy!
        log_metadata(next_item)
      rescue StandardError => exception
        # Mark the row as errored so it is not retried as pending, unless the
        # destroy already succeeded before the failure.
        next_item&.update_column(:status, :error) unless next_item&.destroyed?
        Gitlab::ErrorTracking.log_exception(
          exception,
          class: self.class.name
        )
      end

      # LimitedCapacity hook: how many pending rows remain (capped at
      # max_running_jobs + 1, enough for the capacity calculation).
      def remaining_work_count(model)
        model.constantize.pending_destruction.limit(max_running_jobs + 1).count
      end

      def max_running_jobs
        MAX_CAPACITY
      end

      private

      # Claims the next pending row inside a transaction. The row is fetched
      # with FOR UPDATE SKIP LOCKED (see the model's next_pending_destruction)
      # and flipped to :processing so concurrent workers skip it.
      def next_item(klass)
        klass.transaction do
          next_item = klass.next_pending_destruction

          if next_item
            next_item.update_column(:status, :processing)
            log_cleanup_item(next_item)
          end

          next_item
        end
      end

      def log_metadata(cached_response)
        log_extra_metadata_on_done(:cached_response_id, cached_response.id)
        log_extra_metadata_on_done(:group_id, cached_response.group_id)
        log_extra_metadata_on_done(:relative_path, cached_response.relative_path)
      end

      def log_cleanup_item(cached_response)
        logger.info(
          structured_payload(
            cached_response_id: cached_response.id
          )
        )
      end
    end
  end
end

View File

@ -0,0 +1,21 @@
---
description: Merge request source branch filter
internal_events: true
action: select_source_branch_filter_on_merge_request_page
identifiers:
- project
- namespace
- user
additional_properties:
label:
description: include or exclude
product_group: global_search
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156491
distributions:
- ce
- ee
tiers:
- free
- premium
- ultimate

View File

@ -592,10 +592,6 @@ production: &base
schedule_migrate_external_diffs_worker:
cron: "15 * * * *"
# Update CI Platform Metrics daily
ci_platform_metrics_update_cron_worker:
cron: "47 9 * * *"
# Periodically update ci_runner_versions table with up-to-date versions and status.
ci_runner_versions_reconciliation_worker:
cron: "@daily"

View File

@ -0,0 +1,77 @@
# frozen_string_literal: true

# NOTE(review): this initializer vendors Puma's Cluster#wait_workers from
# https://github.com/puma/puma/pull/3314 — keep it byte-identical to upstream
# so the guard below can drop it cleanly once Puma >= 6.5 ships the fix.
return unless Gitlab::Runtime.puma?

require 'puma'
require 'puma/cluster'

# Ruby 3.1 and 3.2 have bugs that prevents Puma from reaping child processes properly:
# https://bugs.ruby-lang.org/issues/20490
# https://bugs.ruby-lang.org/issues/19837
#
# https://github.com/puma/puma/pull/3314 fixes this in Puma, but a release
# has not been forthcoming.
if Gem::Version.new(Puma::Const::PUMA_VERSION) > Gem::Version.new('6.5')
  raise 'This patch should not be needed after Puma 6.5.0.'
end

# rubocop:disable Style/RedundantBegin -- These are upstream changes
# rubocop:disable Cop/LineBreakAfterGuardClauses -- These are upstream changes
# rubocop:disable Layout/EmptyLineAfterGuardClause -- These are upstream changes
module Puma
  class Cluster < Runner
    # loops thru @workers, removing workers that exited, and calling
    # `#term` if needed
    def wait_workers
      # Reap all children, known workers or otherwise.
      # If puma has PID 1, as it's common in containerized environments,
      # then it's responsible for reaping orphaned processes, so we must reap
      # all our dead children, regardless of whether they are workers we spawned
      # or some reattached processes.
      reaped_children = {}
      loop do
        begin
          pid, status = Process.wait2(-1, Process::WNOHANG)
          break unless pid

          reaped_children[pid] = status
        rescue Errno::ECHILD
          break
        end
      end

      @workers.reject! do |w|
        next false if w.pid.nil?
        begin
          # We may need to check the PID individually because:
          # 1. From Ruby versions 2.6 to 3.2, `Process.detach` can prevent or delay
          #    `Process.wait2(-1)` from detecting a terminated process: https://bugs.ruby-lang.org/issues/19837.
          # 2. When `fork_worker` is enabled, some worker may not be direct children,
          #    but grand children.  Because of this they won't be reaped by `Process.wait2(-1)`.
          if reaped_children.delete(w.pid) || Process.wait(w.pid, Process::WNOHANG)
            true
          else
            w.term if w.term?
            nil
          end
        rescue Errno::ECHILD
          begin
            Process.kill(0, w.pid)
            # child still alive but has another parent (e.g., using fork_worker)
            w.term if w.term?
            false
          rescue Errno::ESRCH, Errno::EPERM
            true # child is already terminated
          end
        end
      end

      # Log unknown children
      reaped_children.each do |pid, status|
        log "! reaped unknown child process pid=#{pid} status=#{status}"
      end
    end
  end
end
# rubocop:enable Style/RedundantBegin
# rubocop:enable Cop/LineBreakAfterGuardClauses
# rubocop:enable Layout/EmptyLineAfterGuardClause

View File

@ -0,0 +1,24 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_select_source_branch_exclude_filter_on_merge_request_page_monthly
description: Monthly count of unique users who selected the source branch exclude option
product_group: global_search
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156491
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: select_source_branch_filter_on_merge_request_page
unique: user.id
filter:
label: exclude

View File

@ -0,0 +1,24 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_select_source_branch_include_filter_on_merge_request_page_monthly
description: Monthly count of unique users who selected the source branch include option
product_group: global_search
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156491
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: select_source_branch_filter_on_merge_request_page
unique: user.id
filter:
label: include

View File

@ -0,0 +1,24 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_select_source_branch_exclude_filter_on_merge_request_page_weekly
description: Weekly count of unique users who selected the source branch exclude option
product_group: global_search
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156491
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: select_source_branch_filter_on_merge_request_page
unique: user.id
filter:
label: exclude

View File

@ -0,0 +1,24 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_select_source_branch_include_filter_on_merge_request_page_weekly
description: Weekly count of unique users who selected the source branch include option
product_group: global_search
performance_indicator_type: []
value_type: number
status: active
milestone: '17.5'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156491
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: select_source_branch_filter_on_merge_request_page
unique: user.id
filter:
label: include

View File

@ -0,0 +1,12 @@
---
table_name: packages_conan_package_references
classes:
- Packages::Conan::PackageReference
feature_categories:
- package_registry
description: Conan package references
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/166443
milestone: '17.5'
gitlab_schema: gitlab_main_cell
sharding_key:
project_id: projects

View File

@ -9,6 +9,7 @@ classes:
- Packages::Helm::Package
- Packages::MlModel::Package
- Packages::Package
- Packages::Pypi::Package
- Packages::Rpm::Package
- Packages::Rubygems::Package
feature_categories:

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
# Adds a `status` smallint column (enum-backed state, default 0) to the
# virtual_registries_packages_maven_cached_responses table.
class AddStatusToVirtualRegistriesPackagesMavenCachedResponses < Gitlab::Database::Migration[2.2]
milestone '17.5'
# Reversible: `change` lets the framework derive the inverse (remove_column).
def change
add_column :virtual_registries_packages_maven_cached_responses, :status, :smallint, default: 0, null: false
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
# Creates the packages_conan_package_references table: one row per Conan
# package reference (the hash identifying a binary package of a recipe).
class CreatePackagesConanPackageReferences < Gitlab::Database::Migration[2.2]
milestone '17.5'
# Names are set explicitly because the auto-generated index/constraint
# names would exceed PostgreSQL's 63-character identifier limit.
UNIQ_IND_PACKAGE_REVISION_REF = 'uniq_idx_on_packages_conan_package_references_package_reference'
CONSTRAINT_NAME = 'chk_conan_references_info_length'
def up
create_table :packages_conan_package_references do |t| # rubocop:disable Migration/EnsureFactoryForTable -- https://gitlab.com/gitlab-org/gitlab/-/issues/468630
t.bigint :package_id, null: false
t.bigint :project_id, null: false
t.bigint :recipe_revision_id
t.timestamps_with_timezone null: false
t.binary :reference, null: false, limit: 20 # A SHA-1 hash (20 bytes)
t.jsonb :info, default: {}, null: false
t.index :project_id
t.index :recipe_revision_id
# A reference must be unique per (package, recipe revision) pair.
t.index [:package_id, :recipe_revision_id, :reference], unique: true, name: UNIQ_IND_PACKAGE_REVISION_REF
# Bounds the serialized JSON size to guard against oversized payloads.
t.check_constraint "char_length(info::text) <= 20000", name: CONSTRAINT_NAME
end
end
def down
drop_table :packages_conan_package_references
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
# Adds the packages_packages(id) foreign key on
# packages_conan_package_references.package_id; references are removed
# automatically when the parent package is deleted (ON DELETE CASCADE).
class AddPackageIdAsForeignKeyInPackagesConanPackageReferences < Gitlab::Database::Migration[2.2]
milestone '17.5'
# add_concurrent_foreign_key must not run inside a transaction block.
disable_ddl_transaction!
def up
add_concurrent_foreign_key :packages_conan_package_references, :packages_packages, column: :package_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :packages_conan_package_references, column: :package_id
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Adds the projects(id) foreign key on
# packages_conan_package_references.project_id (sharding key column);
# references are removed when the project is deleted (ON DELETE CASCADE).
class AddProjectIdAsForeignKeyInPackagesConanPackageReferences < Gitlab::Database::Migration[2.2]
milestone '17.5'
# add_concurrent_foreign_key must not run inside a transaction block.
disable_ddl_transaction!
def up
add_concurrent_foreign_key :packages_conan_package_references, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :packages_conan_package_references, column: :project_id
end
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
# Adds the packages_conan_recipe_revisions(id) foreign key on
# packages_conan_package_references.recipe_revision_id (nullable column);
# references are removed when the recipe revision is deleted.
class AddRecipeRevisionIdAsForeignKeyInPackagesConanPackageReferences < Gitlab::Database::Migration[2.2]
milestone '17.5'
# add_concurrent_foreign_key must not run inside a transaction block.
disable_ddl_transaction!
def up
add_concurrent_foreign_key :packages_conan_package_references, :packages_conan_recipe_revisions,
column: :recipe_revision_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :packages_conan_package_references, column: :recipe_revision_id
end
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
# Removes any queued or scheduled Sidekiq jobs for the deleted
# CiPlatformMetricsUpdateCronWorker so stale jobs are not picked up
# after the worker class no longer exists.
class RemoveCiPlatformMetricsUpdateCronWorkerJobInstances < Gitlab::Database::Migration[2.2]
milestone '17.5'
# sidekiq_remove_jobs operates on Redis, not the database, and must not
# run inside a DB transaction.
disable_ddl_transaction!
# Frozen: `# frozen_string_literal: true` freezes the strings but not the
# array literal itself, so freeze explicitly to prevent mutation.
DEPRECATED_JOB_CLASSES = %w[
CiPlatformMetricsUpdateCronWorker
].freeze
def up
sidekiq_remove_jobs(job_klasses: DEPRECATED_JOB_CLASSES)
end
def down
# This migration removes any instances of deprecated workers and cannot be undone.
end
end

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
# Removes the legacy per-setting Conan info columns (os, architecture,
# compiler, etc.) from packages_conan_metadata; per the migration that
# adds packages_conan_package_references, this data moves to a JSONB
# `info` column on that table.
class RemoveConanInfoColumnsInPackagesConanMetadata < Gitlab::Database::Migration[2.2]
# Required because `down` uses add_text_limit, which adds check
# constraints concurrently and cannot run inside a transaction.
disable_ddl_transaction!
milestone '17.5'
def up
with_lock_retries do
remove_column :packages_conan_metadata, :os, if_exists: true
remove_column :packages_conan_metadata, :architecture, if_exists: true
remove_column :packages_conan_metadata, :build_type, if_exists: true
remove_column :packages_conan_metadata, :compiler, if_exists: true
remove_column :packages_conan_metadata, :compiler_version, if_exists: true
remove_column :packages_conan_metadata, :compiler_libcxx, if_exists: true
remove_column :packages_conan_metadata, :compiler_cppstd, if_exists: true
end
end
# Re-creates the columns with their original text length limits so the
# migration is fully reversible.
def down
with_lock_retries do
add_column :packages_conan_metadata, :os, :text, if_not_exists: true
add_column :packages_conan_metadata, :architecture, :text, if_not_exists: true
add_column :packages_conan_metadata, :build_type, :text, if_not_exists: true
add_column :packages_conan_metadata, :compiler, :text, if_not_exists: true
add_column :packages_conan_metadata, :compiler_version, :text, if_not_exists: true
add_column :packages_conan_metadata, :compiler_libcxx, :text, if_not_exists: true
add_column :packages_conan_metadata, :compiler_cppstd, :text, if_not_exists: true
end
add_text_limit :packages_conan_metadata, :os, 32
add_text_limit :packages_conan_metadata, :architecture, 32
add_text_limit :packages_conan_metadata, :build_type, 32
add_text_limit :packages_conan_metadata, :compiler, 32
add_text_limit :packages_conan_metadata, :compiler_version, 16
add_text_limit :packages_conan_metadata, :compiler_libcxx, 32
add_text_limit :packages_conan_metadata, :compiler_cppstd, 32
end
end

View File

@ -0,0 +1 @@
9e550c57e33c3e3118416b9ab3728e8b53d54168012cd5aef40c748285b561bc

View File

@ -0,0 +1 @@
3e8ba4ec7cc1d5a3ac4927cc47ae458ad044553697d1e9d01e1029080c3d5505

View File

@ -0,0 +1 @@
ea088526f189d7db821bd04f90c7699faa9673de6d3c3345b57c8e6dbceaee66

View File

@ -0,0 +1 @@
df306ba517e589c537b76c855f4829adc28e4f4b164137ade5a9a1c7a18f40bc

View File

@ -0,0 +1 @@
055a99f9018a2a0f3f39670561ad84a5d792bdc3b9ca763bd9850253926db48f

View File

@ -0,0 +1 @@
d8344419bd22f1b91b93afc7ea84e11a898f8c653296fae319bc8b9a3d9ac9eb

View File

@ -0,0 +1 @@
e1c3d82e4e7e607484f94d7a6f03508930478cdc21edbf05a9d674b7be337831

View File

@ -14790,21 +14790,7 @@ CREATE TABLE packages_conan_metadata (
updated_at timestamp with time zone NOT NULL,
package_username character varying(255) NOT NULL,
package_channel character varying(255) NOT NULL,
project_id bigint,
os text,
architecture text,
build_type text,
compiler text,
compiler_version text,
compiler_libcxx text,
compiler_cppstd text,
CONSTRAINT check_15f3356ff2 CHECK ((char_length(architecture) <= 32)),
CONSTRAINT check_3dc474bc51 CHECK ((char_length(compiler_version) <= 16)),
CONSTRAINT check_52abd85dde CHECK ((char_length(compiler_libcxx) <= 32)),
CONSTRAINT check_535bd0bf5b CHECK ((char_length(os) <= 32)),
CONSTRAINT check_a0b998cb1b CHECK ((char_length(build_type) <= 32)),
CONSTRAINT check_e57d0def27 CHECK ((char_length(compiler_cppstd) <= 32)),
CONSTRAINT check_e7f03884b8 CHECK ((char_length(compiler) <= 32))
project_id bigint
);
CREATE SEQUENCE packages_conan_metadata_id_seq
@ -14816,6 +14802,27 @@ CREATE SEQUENCE packages_conan_metadata_id_seq
ALTER SEQUENCE packages_conan_metadata_id_seq OWNED BY packages_conan_metadata.id;
CREATE TABLE packages_conan_package_references (
id bigint NOT NULL,
package_id bigint NOT NULL,
project_id bigint NOT NULL,
recipe_revision_id bigint,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
reference bytea NOT NULL,
info jsonb DEFAULT '{}'::jsonb NOT NULL,
CONSTRAINT chk_conan_references_info_length CHECK ((char_length((info)::text) <= 20000))
);
CREATE SEQUENCE packages_conan_package_references_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE packages_conan_package_references_id_seq OWNED BY packages_conan_package_references.id;
CREATE TABLE packages_conan_recipe_revisions (
id bigint NOT NULL,
package_id bigint NOT NULL,
@ -19775,6 +19782,7 @@ CREATE TABLE virtual_registries_packages_maven_cached_responses (
object_storage_key text NOT NULL,
upstream_etag text,
content_type text DEFAULT 'application/octet-stream'::text NOT NULL,
status smallint DEFAULT 0 NOT NULL,
CONSTRAINT check_28c64d513d CHECK ((char_length(object_storage_key) <= 255)),
CONSTRAINT check_30b7e853d9 CHECK ((char_length(upstream_etag) <= 255)),
CONSTRAINT check_68b105cda6 CHECK ((char_length(file) <= 255)),
@ -22271,6 +22279,8 @@ ALTER TABLE ONLY packages_conan_file_metadata ALTER COLUMN id SET DEFAULT nextva
ALTER TABLE ONLY packages_conan_metadata ALTER COLUMN id SET DEFAULT nextval('packages_conan_metadata_id_seq'::regclass);
ALTER TABLE ONLY packages_conan_package_references ALTER COLUMN id SET DEFAULT nextval('packages_conan_package_references_id_seq'::regclass);
ALTER TABLE ONLY packages_conan_recipe_revisions ALTER COLUMN id SET DEFAULT nextval('packages_conan_recipe_revisions_id_seq'::regclass);
ALTER TABLE ONLY packages_debian_group_architectures ALTER COLUMN id SET DEFAULT nextval('packages_debian_group_architectures_id_seq'::regclass);
@ -24736,6 +24746,9 @@ ALTER TABLE ONLY packages_conan_file_metadata
ALTER TABLE ONLY packages_conan_metadata
ADD CONSTRAINT packages_conan_metadata_pkey PRIMARY KEY (id);
ALTER TABLE ONLY packages_conan_package_references
ADD CONSTRAINT packages_conan_package_references_pkey PRIMARY KEY (id);
ALTER TABLE ONLY packages_conan_recipe_revisions
ADD CONSTRAINT packages_conan_recipe_revisions_pkey PRIMARY KEY (id);
@ -29702,6 +29715,10 @@ CREATE UNIQUE INDEX index_packages_conan_metadata_on_package_id_username_channel
CREATE INDEX index_packages_conan_metadata_on_project_id ON packages_conan_metadata USING btree (project_id);
CREATE INDEX index_packages_conan_package_references_on_project_id ON packages_conan_package_references USING btree (project_id);
CREATE INDEX index_packages_conan_package_references_on_recipe_revision_id ON packages_conan_package_references USING btree (recipe_revision_id);
CREATE INDEX index_packages_conan_recipe_revisions_on_project_id ON packages_conan_recipe_revisions USING btree (project_id);
CREATE INDEX index_packages_debian_group_architectures_on_group_id ON packages_debian_group_architectures USING btree (group_id);
@ -31400,6 +31417,8 @@ CREATE UNIQUE INDEX uniq_audit_instance_event_filters_destination_id_and_event_t
CREATE UNIQUE INDEX uniq_google_cloud_logging_configuration_namespace_id_and_name ON audit_events_google_cloud_logging_configurations USING btree (namespace_id, name);
CREATE UNIQUE INDEX uniq_idx_on_packages_conan_package_references_package_reference ON packages_conan_package_references USING btree (package_id, recipe_revision_id, reference);
CREATE UNIQUE INDEX uniq_idx_packages_packages_on_project_id_name_version_ml_model ON packages_packages USING btree (project_id, name, version) WHERE ((package_type = 14) AND (status <> 4));
CREATE UNIQUE INDEX uniq_idx_project_compliance_framework_on_project_framework ON project_compliance_framework_settings USING btree (project_id, framework_id);
@ -34058,6 +34077,9 @@ ALTER TABLE ONLY import_source_users
ALTER TABLE ONLY integrations
ADD CONSTRAINT fk_71cce407f9 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY packages_conan_package_references
ADD CONSTRAINT fk_7210467bfc FOREIGN KEY (package_id) REFERENCES packages_packages(id) ON DELETE CASCADE;
ALTER TABLE ONLY subscription_user_add_on_assignments
ADD CONSTRAINT fk_724c2df9a8 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
@ -34472,6 +34494,9 @@ ALTER TABLE ONLY compliance_management_frameworks
ALTER TABLE ONLY ml_experiment_metadata
ADD CONSTRAINT fk_b764e76c6c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY packages_conan_package_references
ADD CONSTRAINT fk_b7c05e1b1c FOREIGN KEY (recipe_revision_id) REFERENCES packages_conan_recipe_revisions(id) ON DELETE CASCADE;
ALTER TABLE ONLY external_status_checks_protected_branches
ADD CONSTRAINT fk_b7d788e813 FOREIGN KEY (protected_branch_id) REFERENCES protected_branches(id) ON DELETE CASCADE;
@ -34784,6 +34809,9 @@ ALTER TABLE ONLY namespaces
ALTER TABLE ONLY fork_networks
ADD CONSTRAINT fk_e7b436b2b5 FOREIGN KEY (root_project_id) REFERENCES projects(id) ON DELETE SET NULL;
ALTER TABLE ONLY packages_conan_package_references
ADD CONSTRAINT fk_e7b5f3afc7 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY error_tracking_error_events
ADD CONSTRAINT fk_e84882273e FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

View File

@ -103,11 +103,18 @@ Backup and restore recreates the entire database, including the indexes.
1. In all PostgreSQL nodes, install the new GitLab package of the same GitLab version.
1. In a [database console](../troubleshooting/postgresql.md#start-a-database-console), rebuild all indexes:
```shell
```sql
SET statement_timeout = 0;
REINDEX DATABASE gitlabhq_production;
```
1. After reindexing the database, the version must be refreshed for all affected collations.
To update the system catalog to record the current collation version:
```sql
ALTER COLLATION <collation_name> REFRESH VERSION;
```
1. In all nodes, start GitLab.
**Advantages**:
@ -136,11 +143,18 @@ Backup and restore recreates the entire database, including the indexes.
1. In the primary site, in a
[database console](../troubleshooting/postgresql.md#start-a-database-console), rebuild all indexes:
```shell
```sql
SET statement_timeout = 0;
REINDEX DATABASE gitlabhq_production;
```
1. After reindexing the database, the version must be refreshed for all affected collations.
To update the system catalog to record the current collation version:
```sql
ALTER COLLATION <collation_name> REFRESH VERSION;
```
1. If the secondary sites receive traffic from users, then let the read-replica databases catch up
before starting GitLab.
1. In all nodes of all sites, start GitLab.
@ -165,7 +179,7 @@ different types of indexes were handled, see the blog post about
1. [Determine which indexes are affected](https://wiki.postgresql.org/wiki/Locale_data_changes#What_indexes_are_affected).
1. In a [database console](../troubleshooting/postgresql.md#start-a-database-console), reindex each affected index:
```shell
```sql
SET statement_timeout = 0;
REINDEX INDEX <index name> CONCURRENTLY;
```
@ -173,8 +187,8 @@ different types of indexes were handled, see the blog post about
1. After reindexing bad indexes, the collation must be refreshed. To update the system catalog to
record the current collation version:
```shell
ALTER COLLATION <collation_name> REFRESH VERSION
```sql
ALTER COLLATION <collation_name> REFRESH VERSION;
```
1. In all nodes, start GitLab.
@ -206,7 +220,7 @@ different types of indexes were handled, see the blog post about
1. In the primary site, in a
[database console](../troubleshooting/postgresql.md#start-a-database-console), reindex each affected index:
```shell
```sql
SET statement_timeout = 0;
REINDEX INDEX <index name> CONCURRENTLY;
```
@ -214,8 +228,8 @@ different types of indexes were handled, see the blog post about
1. After reindexing bad indexes, the collation must be refreshed. To update the system catalog to
record the current collation version:
```shell
ALTER COLLATION <collation_name> REFRESH VERSION
```sql
ALTER COLLATION <collation_name> REFRESH VERSION;
```
1. The existing PostgreSQL streaming replication should replicate the reindex changes to the

View File

@ -10,7 +10,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
DETAILS:
**Tier:** For a limited time, Premium and Ultimate. In the future, [GitLab Duo Enterprise](../../subscriptions/subscription-add-ons.md).
**Offering:** Self-managed
**Status:** Experiment
**Status:** Beta
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/12972) in GitLab 17.1 [with a flag](../../administration/feature_flags.md) named `ai_custom_model`. Disabled by default.
@ -18,16 +18,6 @@ FLAG:
The availability of this feature is controlled by a feature flag.
For more information, see the history.
WARNING:
This feature is considered [experimental](../../policy/experiment-beta-support.md) and is not intended for customer usage outside of initial design partners. We expect major changes to this feature.
DISCLAIMER:
This page contains information related to upcoming products, features, and functionality.
It is important to note that the information presented is for informational purposes only.
Please do not rely on this information for purchasing or planning purposes.
The development, release, and timing of any products, features, or functionality may be subject to change or delay and remain at the
sole discretion of GitLab Inc.
To configure your GitLab instance to access the available self-hosted models in your infrastructure:
1. Configure the self-hosted model.

View File

@ -10,7 +10,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
DETAILS:
**Tier:** For a limited time, Premium and Ultimate. In the future, [GitLab Duo Enterprise](../../subscriptions/subscription-add-ons.md).
**Offering:** Self-managed
**Status:** Experiment
**Status:** Beta
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/12972) in GitLab 17.1 [with a flag](../../administration/feature_flags.md) named `ai_custom_model`. Disabled by default.
@ -18,15 +18,6 @@ FLAG:
The availability of this feature is controlled by a feature flag.
For more information, see the history.
WARNING:
This feature is considered [experimental](../../policy/experiment-beta-support.md) and is not intended for customer usage outside of initial design partners. We expect major changes to this feature.
DISCLAIMER:
This page contains information related to upcoming products, features, and functionality.
It is important to note that the information presented is for informational purposes only.
Please do not rely on this information for purchasing or planning purposes.
The development, release, and timing of any products, features, or functionality may be subject to change or delay and remain at the sole discretion of GitLab Inc.
When you deploy a self-hosted model, you can:
- Manage the end-to-end transmission of requests to enterprise-hosted large
@ -59,7 +50,7 @@ feature. For more information about this offering, see
To deploy a self-hosted large language model:
1. [Set up your self-hosted model deployment infrastructure](../../administration/self_hosted_models/install_infrastructure.md) and connect it to your GitLab instance.
1. [Set up your self-hosted model infrastructure](../../administration/self_hosted_models/install_infrastructure.md) and connect it to your GitLab instance.
1. [Configure your GitLab instance to access self-hosted models](../../administration/self_hosted_models/configure_duo_features.md) using instance and group settings.
## Self-hosted models compared to the default GitLab AI vendor architecture

View File

@ -1,16 +1,16 @@
---
stage: AI-Powered
group: Custom Models
description: Setup your self-hosted model deployment infrastructure
description: Set up your self-hosted model infrastructure
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Set up your self-hosted model deployment infrastructure
# Set up your self-hosted model infrastructure
DETAILS:
**Tier:** For a limited time, Premium and Ultimate. In the future, [GitLab Duo Enterprise](../../subscriptions/subscription-add-ons.md).
**Offering:** Self-managed
**Status:** Experiment
**Status:** Beta
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/12972) in GitLab 17.1 [with a flag](../../administration/feature_flags.md) named `ai_custom_model`. Disabled by default.
@ -18,19 +18,10 @@ FLAG:
The availability of this feature is controlled by a feature flag.
For more information, see the history.
WARNING:
This feature is considered [experimental](../../policy/experiment-beta-support.md) and is not intended for customer usage outside of initial design partners. We expect major changes to this feature.
DISCLAIMER:
This page contains information related to upcoming products, features, and functionality.
It is important to note that the information presented is for informational purposes only.
Please do not rely on this information for purchasing or planning purposes.
The development, release, and timing of any products, features, or functionality may be subject to change or delay and remain at the sole discretion of GitLab Inc.
By self-hosting the model, AI Gateway, and GitLab instance, there are no calls to
external architecture, ensuring maximum levels of security.
To set up your self-hosted model deployment infrastructure:
To set up your self-hosted model infrastructure:
1. Install the large language model (LLM) serving infrastructure.
1. Configure your GitLab instance.

View File

@ -1,7 +1,7 @@
---
stage: AI-Powered
group: Custom Models
description: Troubleshooting tips for deploying self-hosted model deployment
description: Troubleshooting tips for deploying self-hosted models
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---

View File

@ -1009,6 +1009,7 @@ the following are the names of GitLab Duo features:
- GitLab Duo Merge Request Summary
- GitLab Duo Product Analytics
- GitLab Duo Root Cause Analysis
- GitLab Duo Self-Hosted Models
- GitLab Duo Test Generation
- GitLab Duo Vulnerability Explanation
- GitLab Duo Vulnerability Resolution
@ -2002,6 +2003,16 @@ Use **self-hosted model** (lowercase) to refer to a language model that's hosted
The language model might be an LLM (large language model), but it might not be.
## Self-Hosted Models
Use title case for the **GitLab Duo Self-Hosted Models** feature.
On first mention on a page, use **GitLab Duo Self-Hosted Models**.
Thereafter, use **Self-Hosted Models** by itself.
Use this phrase only when referring specifically to the feature name.
If you're writing about [self-hosted models](#self-hosted-model) in general, you do not need to use title case.
## self-managed
Use **self-managed** to refer to a customer's installation of GitLab. Do not use **self-hosted**.

View File

@ -148,6 +148,16 @@ DETAILS:
## Beta features
### Self-Hosted Models
DETAILS:
**Tier:** For a limited time, Premium and Ultimate. In the future, [GitLab Duo Enterprise](../../subscriptions/subscription-add-ons.md).
**Offering:** Self-managed
**Status:** Beta
- Host a GitLab-approved model that's different from the default.
- [View documentation](../../administration/self_hosted_models/index.md).
### Merge Request Summary
DETAILS:

View File

@ -142,17 +142,52 @@ To use GitLab Duo Chat in the GitLab Duo plugin for JetBrains IDEs:
1. In the JetBrains marketplace, download and install the [GitLab Duo plugin](../../editor_extensions/jetbrains_ide/index.md#download-the-extension).
1. Configure the [GitLab Duo plugin](../../editor_extensions/jetbrains_ide/index.md#configure-the-extension).
1. In a JetBrains IDE, open a project.
1. Open Chat by using one of the following methods:
- On the right tool window bar, select **GitLab Duo Chat**.
- Use a keyboard shortcut: <kbd>ALT</kbd> + <kbd>d</kbd> on Windows and Linux, or
<kbd>Option</kbd> + <kbd>d</kbd> on macOS.
- In the file that you have open in the editor:
1. Optional. Select some code.
1. Right-click and select **GitLab Duo Chat**.
1. Select **Open Chat Window**.
1. Select **Explain Code**, **Generate Tests**, or **Refactor Code**.
- Add keyboard or mouse shortcuts for each action under **Keymap** in the **Settings**.
1. In the message box, enter your question and press **Enter** or select **Send**.
1. Open GitLab Duo Chat in either a chat window or an editor window:
### In a chat window
To open GitLab Duo Chat in a chat window, use any of these methods:
- On the right tool window bar, by selecting **GitLab Duo Chat**.
- From a keyboard shortcut, by pressing:
- macOS: <kbd>Option</kbd> + <kbd>d</kbd>
- Windows and Linux: <kbd>ALT</kbd> + <kbd>d</kbd>
- In the file that you have open in the editor:
1. Optional. Select some code.
1. Right-click and select **GitLab Duo Chat**.
1. Select **Open Chat Window**.
1. Select **Explain Code**, **Generate Tests**, or **Refactor Code**.
- Adding keyboard or mouse shortcuts for each action under **Keymap** in the **Settings**.
After GitLab Duo Chat opens:
1. In the message box, enter your question. The available commands are shown while you enter text:
- Enter `/` to display all available commands.
- Enter `/re` to display `/refactor` and `/reset`.
1. To send your question, press **Enter** or select **Send**.
1. Use the buttons within code blocks in the responses to interact with them.
### In the editor window
> - [Generally available](https://gitlab.com/groups/gitlab-org/editor-extensions/-/epics/80) in GitLab Duo 3.0.0.
To open GitLab Duo Chat in the editor window, use any of these methods:
- From a keyboard shortcut, by pressing:
- macOS: <kbd>Option</kbd> + <kbd>c</kbd>
- Windows and Linux: <kbd>ALT</kbd> + <kbd>c</kbd>
- In the currently open file in your IDE, by selecting some code,
then, in the floating toolbar, selecting **GitLab Duo Quick Chat** (**{tanuki-ai}**).
- Right-clicking, then selecting **GitLab Duo Chat > Open Quick Chat**.
After Quick Chat opens:
1. In the message box, enter your question. The available commands are shown while you enter text:
- Enter `/` to display all available commands.
- Enter `/re` to display `/refactor` and `/reset`.
1. To send your question, press **Enter**.
1. Use the buttons around code blocks in the responses to interact with them.
1. To exit chat, either select **Escape to close**, or press **Escape** while focused on the chat.
## Watch a demo and get tips

View File

@ -10,7 +10,13 @@ module Gitlab
# @attr [String] stderr
# @attr [Array<Process::Status>] status_list
# @attr [Float] duration
Result = Struct.new(:stderr, :status_list, :duration, keyword_init: true)
Result = Struct.new(:stderr, :status_list, :duration, keyword_init: true) do
# Whether every command in the executed pipeline exited successfully.
#
# Returns false when there is no status information at all (nil or
# empty status_list). Uses `all?(&:success?)`, which short-circuits on
# the first failure instead of building an intermediate boolean array.
#
# @return [Boolean]
def success?
return false unless status_list&.any?
status_list.all?(&:success?)
end
end
attr_reader :shell_commands
@ -24,7 +30,7 @@ module Gitlab
# @param [IO|String|Array] input stdin redirection
# @param [IO|String|Array] output stdout redirection
# @return [Pipeline::Result]
def run_pipeline!(input: nil, output: nil)
def run!(input: nil, output: nil)
start = Time.now
# Open3 writes on `err_write` and we receive from `err_read`
err_read, err_write = IO.pipe
@ -43,7 +49,11 @@ module Gitlab
stderr = err_read.read
err_read.close # close after reading to avoid leaking file descriptors
Result.new(stderr: stderr, status_list: status_list, duration: duration)
Result.new(
stderr: stderr,
status_list: status_list,
duration: duration
)
end
private

View File

@ -4,6 +4,8 @@ RSpec.describe Gitlab::Backup::Cli::Shell::Pipeline do
let(:command) { Gitlab::Backup::Cli::Shell::Command }
let(:printf_command) { command.new('printf "3\n2\n1"') }
let(:sort_command) { command.new('sort') }
let(:true_command) { command.new('true') }
let(:false_command) { command.new('false') }
subject(:pipeline) { described_class }
@ -19,11 +21,9 @@ RSpec.describe Gitlab::Backup::Cli::Shell::Pipeline do
end
end
describe '#run_pipeline!' do
it 'returns a Pipeline::Status' do
true_command = command.new('true')
result = pipeline.new(true_command, true_command).run_pipeline!
describe '#run!' do
it 'returns a Pipeline::Result' do
result = pipeline.new(true_command, true_command).run!
expect(result).to be_a(Gitlab::Backup::Cli::Shell::Pipeline::Result)
end
@ -33,24 +33,20 @@ RSpec.describe Gitlab::Backup::Cli::Shell::Pipeline do
expected_output = 'my custom error content'
err_command = command.new("echo #{expected_output} > /dev/stderr")
result = pipeline.new(err_command).run_pipeline!
result = pipeline.new(err_command).run!
expect(result.stderr.chomp).to eq(expected_output)
end
it 'includes a list of Process::Status from the executed pipeline' do
true_command = command.new('true')
result = pipeline.new(true_command, true_command).run_pipeline!
result = pipeline.new(true_command, true_command).run!
expect(result.status_list).to all be_a(Process::Status)
expect(result.status_list).to all respond_to(:exited?, :termsig, :stopsig, :exitstatus, :success?, :pid)
end
it 'includes a list of Process::Status that handles exit signals' do
false_command = command.new('false')
result = pipeline.new(false_command, false_command).run_pipeline!
result = pipeline.new(false_command, false_command).run!
expect(result.status_list).to all satisfy { |status| !status.success? }
expect(result.status_list).to all satisfy { |status| status.exitstatus == 1 }
@ -66,7 +62,7 @@ RSpec.describe Gitlab::Backup::Cli::Shell::Pipeline do
output_r, output_w = IO.pipe
result = pipeline.new(echo_command).run_pipeline!(input: input_r, output: output_w)
result = pipeline.new(echo_command).run!(input: input_r, output: output_w)
input_r.close
output_w.close
@ -78,4 +74,44 @@ RSpec.describe Gitlab::Backup::Cli::Shell::Pipeline do
expect(output).to match(/stdin is : my custom content/)
end
end
# Unit coverage for Pipeline::Result#success?: it must be true only when
# every command in the executed pipeline reported a successful exit
# status, and false whenever status information is missing or empty.
# `true_command` / `false_command` are `let` helpers defined at the top
# of this spec file.
describe Gitlab::Backup::Cli::Shell::Pipeline::Result do
describe '#success?' do
context 'when one of multiple commands is unsuccessful' do
it 'returns false' do
expect(Gitlab::Backup::Cli::Shell::Pipeline.new(true_command, false_command).run!.success?).to be false
end
end
context 'when all commands are successful' do
it 'returns true' do
expect(Gitlab::Backup::Cli::Shell::Pipeline.new(true_command, true_command).run!.success?).to be true
end
end
# The remaining contexts build Result structs directly (no commands run).
context 'when there is no result' do
let(:result) { described_class.new(status_list: nil) }
it 'returns false' do
expect(result.success?).to be false
end
end
context 'when there is no status list' do
let(:result) { described_class.new }
it 'returns false' do
expect(result.success?).to be false
end
end
context 'when there are no status results' do
let(:result) { described_class.new(status_list: []) }
it 'returns false' do
expect(result.success?).to be false
end
end
end
end
end

View File

@ -58,7 +58,7 @@ module Backup
target_directory: backup_files_realpath,
target: '.',
excludes: excludes)
result = shell_pipeline.new(tar_command, compress_command).run_pipeline!(output: archive_file)
result = shell_pipeline.new(tar_command, compress_command).run!(output: archive_file)
FileUtils.rm_rf(backup_files_realpath)
else
@ -69,7 +69,7 @@ module Backup
target: '.',
excludes: excludes)
result = shell_pipeline.new(tar_command, compress_command).run_pipeline!(output: archive_file)
result = shell_pipeline.new(tar_command, compress_command).run!(output: archive_file)
end
success = pipeline_succeeded?(
@ -94,7 +94,7 @@ module Backup
archive_file: USE_STDIN,
target_directory: storage_realpath)
result = shell_pipeline.new(decompress_command, tar_command).run_pipeline!(input: archive_file)
result = shell_pipeline.new(decompress_command, tar_command).run!(input: archive_file)
success = pipeline_succeeded?(
compress_status: result.status_list[0],

View File

@ -75,7 +75,7 @@ namespace :tw do
# CodeOwnerRule.new('Respond', ''),
CodeOwnerRule.new('Runner', '@rsarangadharan'),
CodeOwnerRule.new('Hosted Runners', '@rsarangadharan'),
CodeOwnerRule.new('Security Policies', '@rdickenson'),
CodeOwnerRule.new('Security Policies', '@rlehmann1'),
CodeOwnerRule.new('Secret Detection', '@rdickenson'),
CodeOwnerRule.new('Solutions Architecture', '@jfullam @brianwald @Darwinjs'),
CodeOwnerRule.new('Source Code', '@brendan777'),
@ -97,7 +97,7 @@ namespace :tw do
'@gitlab-org/analytics-section/product-analytics/engineers/frontend ' \
'@gitlab-org/analytics-section/analytics-instrumentation/engineers'),
CodeOwnerRule.new('Authentication', '@gitlab-org/govern/authentication/approvers'),
CodeOwnerRule.new('Authorization', '@gitlab-org/govern/authorization/approvers'),
CodeOwnerRule.new('Authorization', '@rlehmann1'),
CodeOwnerRule.new('Compliance',
'@gitlab-org/govern/security-policies-frontend @gitlab-org/govern/threat-insights-frontend-team ' \
'@gitlab-org/govern/threat-insights-backend-team'),

View File

@ -25045,6 +25045,9 @@ msgstr ""
msgid "GlobalSearch|Archived"
msgstr ""
msgid "GlobalSearch|Branch not included"
msgstr ""
msgid "GlobalSearch|Change context %{kbdStart}↵%{kbdEnd}"
msgstr ""
@ -25168,6 +25171,9 @@ msgstr ""
msgid "GlobalSearch|No labels found"
msgstr ""
msgid "GlobalSearch|No matching results"
msgstr ""
msgid "GlobalSearch|No results found"
msgstr ""
@ -25219,6 +25225,9 @@ msgstr ""
msgid "GlobalSearch|Results updated. %{count} results available. Use the up and down arrow keys to navigate search results list, or ENTER to submit."
msgstr ""
msgid "GlobalSearch|Search"
msgstr ""
msgid "GlobalSearch|Search %{kbdStart}↵%{kbdEnd}"
msgstr ""
@ -25279,6 +25288,9 @@ msgstr ""
msgid "GlobalSearch|Snippets"
msgstr ""
msgid "GlobalSearch|Source branch"
msgstr ""
msgid "GlobalSearch|The search term must be at least 3 characters long."
msgstr ""
@ -25288,6 +25300,9 @@ msgstr ""
msgid "GlobalSearch|Tip:"
msgstr ""
msgid "GlobalSearch|Toggle if results have source branch included or excluded"
msgstr ""
msgid "GlobalSearch|Type %{kbdOpen}/%{kbdClose} to search"
msgstr ""
@ -26529,9 +26544,6 @@ msgstr ""
msgid "GroupSettings|Organizations and contacts can be created and associated with issues."
msgstr ""
msgid "GroupSettings|Participate in the %{link_start}GitLab Early Access Program%{link_end}."
msgstr ""
msgid "GroupSettings|Please choose a group URL with no special characters or spaces."
msgstr ""
@ -48908,6 +48920,9 @@ msgstr ""
msgid "SecurityExclusions|Add exclusion"
msgstr ""
msgid "SecurityExclusions|Delete exclusion"
msgstr ""
msgid "SecurityExclusions|Description"
msgstr ""
@ -48929,9 +48944,24 @@ msgstr ""
msgid "SecurityExclusions|Enter one or more rules to ignore, separated by line breaks."
msgstr ""
msgid "SecurityExclusions|Exclusion deleted successfully."
msgstr ""
msgid "SecurityExclusions|Exclusion disabled successfully."
msgstr ""
msgid "SecurityExclusions|Exclusion enabled successfully."
msgstr ""
msgid "SecurityExclusions|Exclusion has been created successfully"
msgstr ""
msgid "SecurityExclusions|Failed to delete the exclusion:"
msgstr ""
msgid "SecurityExclusions|Failed to update the exclusion:"
msgstr ""
msgid "SecurityExclusions|File or directory location"
msgstr ""
@ -48985,6 +49015,9 @@ msgstr ""
msgid "SecurityExclusions|Value"
msgstr ""
msgid "SecurityExclusions|You are about to delete the %{type} `%{value}` from the secret detection exclusions. Are you sure you want to continue?"
msgstr ""
msgid "SecurityExclusions|ex: This secret is used for testing"
msgstr ""
@ -64513,6 +64546,9 @@ msgstr ""
msgid "compliance violation has already been recorded"
msgstr ""
msgid "conaninfo is too large. Maximum size is %{max_size} characters"
msgstr ""
msgid "contacts can only be added to root groups"
msgstr ""

View File

@ -74,7 +74,7 @@
"@gitlab/fonts": "^1.3.0",
"@gitlab/query-language": "^0.0.5-a-20240903",
"@gitlab/svgs": "3.117.0",
"@gitlab/ui": "94.0.0",
"@gitlab/ui": "94.0.1",
"@gitlab/web-ide": "^0.0.1-dev-20240909013227",
"@mattiasbuelens/web-streams-adapter": "^0.1.0",
"@rails/actioncable": "7.0.8-4",

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

FactoryBot.define do
  # Builds a Packages::Conan::PackageReference with a plausible conaninfo
  # payload and a unique 20-byte binary reference per created record.
  factory :conan_package_reference, class: 'Packages::Conan::PackageReference' do
    package { association(:conan_package) }
    project { association(:project) }
    recipe_revision { association(:conan_recipe_revision) }

    # NOTE(review): settings/requires/options presumably satisfy the model's
    # info validation, with extra keys tolerated — confirm against the schema.
    info do
      {
        settings: { os: 'Linux', arch: 'x86_64' },
        requires: ['libA/1.0@user/testing'],
        options: { fPIC: true },
        otherProperties: 'some_value'
      }
    end

    # Digest::SHA1.digest returns a raw 20-byte string; the sequence keeps each
    # reference unique across created records.
    sequence(:reference) { |n| Digest::SHA1.digest(n.to_s) } # rubocop:disable Fips/SHA1 -- The conan registry is not FIPS compliant
  end
end

View File

@ -5,7 +5,7 @@ FactoryBot.define do
class: 'VirtualRegistries::Packages::Maven::CachedResponse' do
upstream { association :virtual_registries_packages_maven_upstream }
group { upstream.group }
relative_path { |n| "/a/relative/path/test-#{n}.txt" }
sequence(:relative_path) { |n| "/a/relative/path/test-#{n}.txt" }
size { 1.kilobyte }
upstream_etag { OpenSSL::Digest.hexdigest('SHA256', 'test') }
content_type { 'text/plain' }
@ -24,5 +24,11 @@ FactoryBot.define do
upstream_checked_at { 30.minutes.ago }
upstream_etag { 'test' }
end
trait :orphan do
after(:create) do |entry|
entry.update_attribute(:upstream_id, nil)
end
end
end
end

View File

@ -1680,3 +1680,18 @@ export const mockDataForBlobBody = {
projectPath: 'Testjs/Test',
__typename: 'SearchBlobFileType',
};
export const mockSourceBranches = [
{
text: 'master',
value: 'master',
},
{
text: 'feature',
value: 'feature',
},
{
text: 'develop',
value: 'develop',
},
];

View File

@ -0,0 +1,113 @@
import { shallowMount } from '@vue/test-utils';
import { GlCollapsibleListbox, GlListboxItem, GlIcon } from '@gitlab/ui';
import BranchDropdown from '~/search/sidebar/components/shared/branch_dropdown.vue';
import waitForPromises from 'helpers/wait_for_promises';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { mockSourceBranches } from 'jest/search/mock_data';
describe('BranchDropdown', () => {
  let wrapper;

  // Baseline props shared by every test; individual tests override them
  // through createComponent(props).
  const defaultProps = {
    sourceBranches: mockSourceBranches,
    errors: [],
    headerText: 'Source branch',
    searchBranchText: 'Search source branch',
    selectedBranch: 'master',
    icon: 'branch',
    isLoading: false,
  };

  const createComponent = (props = {}, options = {}) => {
    wrapper = shallowMount(BranchDropdown, {
      propsData: {
        ...defaultProps,
        ...props,
      },
      // GlCollapsibleListbox and GlIcon are rendered for real (not stubbed
      // out) so their props and list items can be inspected below.
      stubs: {
        GlCollapsibleListbox,
        GlIcon,
      },
      ...options,
    });
  };

  const findGlCollapsibleListbox = () => wrapper.findComponent(GlCollapsibleListbox);
  const findGlListboxItems = () => wrapper.findAllComponents(GlListboxItem);
  const findErrorMessages = () => wrapper.findAll('[data-testid="branch-dropdown-error-list"]');

  describe('when nothing is selected', () => {
    beforeEach(() => {
      createComponent();
    });

    it('renders the GlCollapsibleListbox component with correct props', () => {
      const toggleClass = [
        {
          '!gl-shadow-inner-1-red-500': undefined,
          'gl-font-monospace': true,
        },
        'gl-mb-0',
      ];

      // Snapshotting props() once is a workaround for
      // "Property or method `nodeType` is not defined".
      // See https://docs.gitlab.com/ee/development/fe_guide/troubleshooting.html#property-or-method-nodetype-is-not-defined-but-youre-not-using-nodetype-anywhere
      // The usual workarounds did not help here, hence the single props()
      // read followed by plain assertions:
      const props = findGlCollapsibleListbox().props();

      expect(props.selected).toBe('master');
      expect(props.headerText).toBe('Source branch');
      expect(props.items).toMatchObject(mockSourceBranches);
      expect(props.noResultsText).toBe('No results found');
      expect(props.searching).toBe(false);
      expect(props.searchPlaceholder).toBe('Search source branch');
      expect(props.toggleClass).toMatchObject(toggleClass);
      expect(props.toggleText).toBe('Search source branch');
      expect(props.icon).toBe('branch');
      expect(props.loading).toBe(false);
      expect(props.resetButtonLabel).toBe('Reset');
    });

    it('renders error messages when errors prop is passed', async () => {
      const errors = ['Error 1', 'Error 2'];
      createComponent({ errors });
      await waitForPromises();

      const errorMessages = findErrorMessages();
      expect(errorMessages.length).toBe(errors.length);

      errorMessages.wrappers.forEach((errorWrapper, index) => {
        expect(errorWrapper.text()).toContain(errors[index]);
      });
    });

    it('search filters items', async () => {
      // The listbox search is debounced, so advance the fake timers first.
      findGlCollapsibleListbox().vm.$emit('search', 'fea');
      jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
      await waitForPromises();
      expect(findGlListboxItems()).toHaveLength(1);
    });

    it('emits hide', () => {
      findGlCollapsibleListbox().vm.$emit('hidden');
      expect(wrapper.emitted('hide')).toStrictEqual([[]]);
    });

    it('emits selected', () => {
      findGlCollapsibleListbox().vm.$emit('select', 'main');
      expect(wrapper.emitted('selected')).toStrictEqual([['main']]);
    });

    it('emits reset', () => {
      findGlCollapsibleListbox().vm.$emit('reset');
      expect(wrapper.emitted('reset')).toStrictEqual([[]]);
    });
  });
});

View File

@ -6,6 +6,7 @@ import { MOCK_QUERY } from 'jest/search/mock_data';
import MergeRequestsFilters from '~/search/sidebar/components/merge_requests_filters.vue';
import StatusFilter from '~/search/sidebar/components/status_filter/index.vue';
import ArchivedFilter from '~/search/sidebar/components/archived_filter/index.vue';
import SourceBranchFilter from '~/search/sidebar/components/source_branch_filter/index.vue';
import { SEARCH_TYPE_ADVANCED, SEARCH_TYPE_BASIC } from '~/search/sidebar/constants';
Vue.use(Vuex);
@ -23,6 +24,9 @@ describe('GlobalSearch MergeRequestsFilters', () => {
state: {
urlQuery: MOCK_QUERY,
searchType: SEARCH_TYPE_ADVANCED,
groupInitialJson: {
id: 1,
},
...initialState,
},
getters: defaultGetters,
@ -35,6 +39,7 @@ describe('GlobalSearch MergeRequestsFilters', () => {
const findStatusFilter = () => wrapper.findComponent(StatusFilter);
const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
const findSourceBranchFilter = () => wrapper.findComponent(SourceBranchFilter);
describe('Renders correctly with Archived Filter', () => {
beforeEach(() => {
@ -48,6 +53,10 @@ describe('GlobalSearch MergeRequestsFilters', () => {
it('renders ArchivedFilter', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
it('renders sourceBranchFilter', () => {
expect(findSourceBranchFilter().exists()).toBe(true);
});
});
describe('Renders correctly with basic search', () => {
@ -62,6 +71,10 @@ describe('GlobalSearch MergeRequestsFilters', () => {
it('renders ArchivedFilter', () => {
expect(findArchivedFilter().exists()).toBe(true);
});
it('renders sourceBranchFilter', () => {
expect(findSourceBranchFilter().exists()).toBe(true);
});
});
describe('hasProjectContext getter', () => {

View File

@ -0,0 +1,161 @@
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
// eslint-disable-next-line no-restricted-imports
import Vuex from 'vuex';
import MockAdapter from 'axios-mock-adapter';
import { GlFormCheckbox } from '@gitlab/ui';
import AjaxCache from '~/lib/utils/ajax_cache';
import axios from '~/lib/utils/axios_utils';
import SourceBranchFilter from '~/search/sidebar/components/source_branch_filter/index.vue';
import BranchDropdown from '~/search/sidebar/components/shared/branch_dropdown.vue';
import { useMockInternalEventsTracking } from 'helpers/tracking_internal_events_helper';
import { MOCK_QUERY } from 'jest/search/mock_data';
Vue.use(Vuex);
describe('Source branch filter', () => {
  let wrapper;
  let mock;

  // Vuex actions are jest mocks so assertions can verify dispatch payloads.
  const actions = {
    setQuery: jest.fn(),
    applyQuery: jest.fn(),
  };

  const defaultState = {
    query: {
      scope: 'merge_requests',
      group_id: 1,
      search: '*',
    },
  };

  const createComponent = (state) => {
    // NOTE(review): spreading `defaultState` at the top level places `query`
    // outside the store's `state` option, so Vuex ignores it and the default
    // query never reaches the component. `state: { ...defaultState, ...state }`
    // was likely intended — confirm before changing, since some tests below
    // rely on the state they pass in being used verbatim.
    const store = new Vuex.Store({
      ...defaultState,
      state,
      actions,
    });

    wrapper = shallowMount(SourceBranchFilter, {
      store,
    });
  };

  const findBranchDropdown = () => wrapper.findComponent(BranchDropdown);
  const findGlFormCheckbox = () => wrapper.findComponent(GlFormCheckbox);

  describe('when nothing is selected', () => {
    beforeEach(() => {
      createComponent();
    });

    it('renders the component', () => {
      expect(findBranchDropdown().exists()).toBe(true);
      expect(findGlFormCheckbox().exists()).toBe(true);
    });
  });

  describe('when everything is selected', () => {
    beforeEach(() => {
      createComponent({
        query: {
          ...MOCK_QUERY,
          'not[source_branch]': 'feature',
        },
      });
    });

    it('renders the component with selected options', () => {
      expect(findBranchDropdown().props('selectedBranch')).toBe('feature');
      expect(findGlFormCheckbox().attributes('checked')).toBe('true');
    });

    it('displays the correct placeholder text and icon', () => {
      expect(findBranchDropdown().props('searchBranchText')).toBe('feature');
      expect(findBranchDropdown().props('icon')).toBe('branch');
    });
  });

  describe('when opening dropdown', () => {
    beforeEach(() => {
      mock = new MockAdapter(axios);
      jest.spyOn(axios, 'get');
      jest.spyOn(AjaxCache, 'retrieve');
      createComponent({
        groupInitialJson: {
          id: 1,
          full_name: 'gitlab-org/gitlab-test',
          full_path: 'gitlab-org/gitlab-test',
        },
      });
    });

    afterEach(() => {
      mock.restore();
    });

    it('calls AjaxCache with correct params', () => {
      findBranchDropdown().vm.$emit('shown');
      expect(AjaxCache.retrieve).toHaveBeenCalledWith(
        '/-/autocomplete/merge_request_source_branches.json?group_id=1',
      );
    });
  });

  describe.each(['source_branch', 'not[source_branch]'])(
    'when selecting a branch with and without toggle',
    (paramName) => {
      const { bindInternalEventDocument } = useMockInternalEventsTracking();

      beforeEach(() => {
        createComponent({
          query: {
            ...MOCK_QUERY,
            [paramName]: 'feature',
          },
        });
      });

      it(`calls setQuery with correct param ${paramName}`, () => {
        const { trackEventSpy } = bindInternalEventDocument(wrapper.element);
        findBranchDropdown().vm.$emit('selected', 'feature');
        expect(actions.setQuery).toHaveBeenCalledWith(expect.anything(), {
          key: paramName,
          value: 'feature',
        });
        // The tracked label distinguishes the exclude toggle from a plain
        // include filter.
        expect(trackEventSpy).toHaveBeenCalledWith(
          'select_source_branch_filter_on_merge_request_page',
          {
            label: paramName === 'not[source_branch]' ? 'exclude' : 'include',
          },
          undefined,
        );
      });
    },
  );

  describe('when resetting selected branch', () => {
    beforeEach(() => {
      createComponent();
    });

    it(`calls setQuery with correct param`, () => {
      // Reset must clear both the include and the exclude variants, then
      // re-apply the query.
      findBranchDropdown().vm.$emit('reset');
      expect(actions.setQuery).toHaveBeenCalledWith(expect.anything(), {
        key: 'source_branch',
        value: '',
      });
      expect(actions.setQuery).toHaveBeenCalledWith(expect.anything(), {
        key: 'not[source_branch]',
        value: '',
      });
      expect(actions.applyQuery).toHaveBeenCalled();
    });
  });
});

View File

@ -1,14 +1,20 @@
import { mount } from '@vue/test-utils';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import locale from '~/locale';
import Translate from '~/vue_shared/translate';
import Component from './translate_spec.vue';
Vue.use(Translate);
describe('Vue translate filter', () => {
const createTranslationMock = (key, ...translations) => {
locale.textdomain('app');
let oldDomain;
let oldData;
beforeAll(() => {
oldDomain = locale.textdomain();
oldData = locale.options.locale_data;
locale.textdomain('app');
locale.options.locale_data = {
app: {
'': {
@ -16,172 +22,32 @@ describe('Vue translate filter', () => {
lang: 'vo',
plural_forms: 'nplurals=2; plural=(n != 1);',
},
[key]: translations,
singular: ['singular_translated'],
plural: ['plural_singular translation', 'plural_multiple translation'],
'%d day': ['%d singular translated', '%d plural translated'],
'Context|Foobar': ['Context|Foobar translated'],
'multiline string': ['multiline string translated'],
'multiline plural': ['multiline string singular', 'multiline string plural'],
'Context| multiline string': ['multiline string with context'],
},
};
};
it('translate singular text (`__`)', () => {
const key = 'singular';
const translation = 'singular_translated';
createTranslationMock(key, translation);
const wrapper = mount({
template: `
<span>
{{ __('${key}') }}
</span>
`,
});
expect(wrapper.text()).toBe(translation);
});
it('translate plural text (`n__`) without any substituting text', () => {
const key = 'plural';
const translationPlural = 'plural_multiple translation';
createTranslationMock(key, 'plural_singular translation', translationPlural);
const wrapper = mount({
template: `
<span>
{{ n__('${key}', 'plurals', 2) }}
</span>
`,
});
expect(wrapper.text()).toBe(translationPlural);
afterAll(() => {
locale.textdomain(oldDomain);
locale.options.locale_data = oldData;
});
describe('translate plural text (`n__`) with substituting %d', () => {
const key = '%d day';
it('works properly', async () => {
const wrapper = await shallowMount(Component);
beforeEach(() => {
createTranslationMock(key, '%d singular translated', '%d plural translated');
});
const { wrappers } = wrapper.findAll('span');
it('and n === 1', () => {
const wrapper = mount({
template: `
<span>
{{ n__('${key}', '%d days', 1) }}
</span>
`,
});
// Just to ensure that the rendering actually worked;
expect(wrappers.length).toBe(10);
expect(wrapper.text()).toBe('1 singular translated');
});
it('and n > 1', () => {
const wrapper = mount({
template: `
<span>
{{ n__('${key}', '%d days', 2) }}
</span>
`,
});
expect(wrapper.text()).toBe('2 plural translated');
});
});
describe('translates text with context `s__`', () => {
const key = 'Context|Foobar';
const translation = 'Context|Foobar translated';
const expectation = 'Foobar translated';
beforeEach(() => {
createTranslationMock(key, translation);
});
it('and using two parameters', () => {
const wrapper = mount({
template: `
<span>
{{ s__('Context', 'Foobar') }}
</span>
`,
});
expect(wrapper.text()).toBe(expectation);
});
it('and using the pipe syntax', () => {
const wrapper = mount({
template: `
<span>
{{ s__('${key}') }}
</span>
`,
});
expect(wrapper.text()).toBe(expectation);
});
});
it('translate multi line text', () => {
const translation = 'multiline string translated';
createTranslationMock('multiline string', translation);
const wrapper = mount({
template: `
<span>
{{ __(\`
multiline
string
\`) }}
</span>
`,
});
expect(wrapper.text()).toBe(translation);
});
it('translate pluralized multi line text', () => {
const translation = 'multiline string plural';
createTranslationMock('multiline string', 'multiline string singular', translation);
const wrapper = mount({
template: `
<span>
{{ n__(
\`
multiline
string
\`,
\`
multiline
strings
\`,
2
) }}
</span>
`,
});
expect(wrapper.text()).toBe(translation);
});
it('translate pluralized multi line text with context', () => {
const translation = 'multiline string with context';
createTranslationMock('Context| multiline string', translation);
const wrapper = mount({
template: `
<span>
{{ s__(
\`
Context|
multiline
string
\`
) }}
</span>
`,
});
expect(wrapper.text()).toBe(translation);
for (const span of wrappers) {
expect(span.text().trim()).toBe(span.attributes()['data-expected']);
}
});
});

View File

@ -0,0 +1,66 @@
<template>
  <!--
    Fixture component for the Vue translate filter spec: every span renders one
    translation helper call (__, n__, s__) and carries the expected translated
    output in its `data-expected` attribute, which the spec compares against
    the rendered text.
  -->
  <div>
    <!-- singular text (`__`) -->
    <span data-expected="singular_translated">
      {{ __('singular') }}
    </span>

    <!-- plural text (`n__`) without any substituting text -->
    <span data-expected="plural_singular translation">
      {{ n__('plural', 'plurals', 1) }}
    </span>
    <span data-expected="plural_multiple translation">
      {{ n__('plural', 'plurals', 2) }}
    </span>

    <!-- plural text (`n__`) with substituting %d -->
    <span data-expected="1 singular translated">
      {{ n__('%d day', '%d days', 1) }}
    </span>
    <span data-expected="2 plural translated">
      {{ n__('%d day', '%d days', 2) }}
    </span>

    <!-- text with context `s__` — both the two-argument and pipe syntaxes -->
    <span data-expected="Foobar translated">
      {{ s__('Context', 'Foobar') }}
    </span>
    <span data-expected="Foobar translated">
      {{ s__('Context|Foobar') }}
    </span>

    <!-- multi line text -->
    <span data-expected="multiline string translated">
      {{
        __(`
        multiline
        string
      `)
      }}
    </span>

    <!-- pluralized line text -->
    <span data-expected="multiline string plural">
      {{
        n__(
          `
        multiline
        plural
      `,
          `
        multiline
        plurals
      `,
          2,
        )
      }}
    </span>

    <!-- multi line text with context -->
    <span data-expected="multiline string with context">
      {{
        s__(
          `
        Context|
        multiline
        string
      `,
        )
      }}
    </span>
  </div>
</template>

View File

@ -56,7 +56,7 @@ RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
it 'moves all necessary files' do
expect_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline) do |pipeline|
expect(pipeline).to receive(:run_pipeline!).and_return(pipeline_status_success)
expect(pipeline).to receive(:run!).and_return(pipeline_status_success)
end
tmp_dir = backup_basepath.join('tmp', "registry.#{Time.now.to_i}")
@ -67,7 +67,7 @@ RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
it 'raises no errors' do
expect_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline) do |pipeline|
expect(pipeline).to receive(:run_pipeline!).and_return(pipeline_status_success)
expect(pipeline).to receive(:run!).and_return(pipeline_status_success)
end
expect { files.restore('registry.tar.gz', 'backup_id') }.not_to raise_error
@ -80,7 +80,7 @@ RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
expect(tar_cmd.cmd_args).to include('--unlink-first')
expect(tar_cmd.cmd_args).to include('--recursive-unlink')
expect(pipeline).to receive(:run_pipeline!).and_return(pipeline_status_success)
expect(pipeline).to receive(:run!).and_return(pipeline_status_success)
end
files.restore('registry.tar.gz', 'backup_id')
@ -88,7 +88,7 @@ RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
it 'raises an error on failure' do
expect_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline) do |pipeline|
expect(pipeline).to receive(:run_pipeline!).and_return(pipeline_status_failed)
expect(pipeline).to receive(:run!).and_return(pipeline_status_failed)
end
expect { files.restore('registry.tar.gz', 'backup_id') }.to raise_error(/Restore operation failed:/)
@ -99,7 +99,7 @@ RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
before do
FileUtils.touch('registry.tar.gz')
allow(FileUtils).to receive(:mv).and_raise(Errno::EACCES)
allow(files).to receive(:run_pipeline!).and_return([[true, true], ''])
allow(files).to receive(:run!).and_return([[true, true], ''])
allow(files).to receive(:pipeline_succeeded?).and_return(true)
end
@ -117,7 +117,7 @@ RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
describe 'folders that are a mountpoint' do
before do
allow(FileUtils).to receive(:mv).and_raise(Errno::EBUSY)
allow(files).to receive(:run_pipeline!).and_return([[true, true], ''])
allow(files).to receive(:run!).and_return([[true, true], ''])
allow(files).to receive(:pipeline_succeeded?).and_return(true)
end
@ -140,7 +140,7 @@ RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
decompress_cmd = pipeline.shell_commands[0]
expect(decompress_cmd.cmd_args).to include('tee')
expect(pipeline).to receive(:run_pipeline!).and_return(pipeline_status_success)
expect(pipeline).to receive(:run!).and_return(pipeline_status_success)
end
expect do
@ -166,7 +166,7 @@ RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
expect(tar_cmd.cmd_args).to include('--exclude=lost+found')
expect(tar_cmd.cmd_args).to include('--exclude=./@pages.tmp')
allow(pipeline).to receive(:run_pipeline!).and_call_original
allow(pipeline).to receive(:run!).and_call_original
end
files.dump('registry.tar.gz', 'backup_id')

View File

@ -242,6 +242,15 @@ RSpec.describe Commit, feature_category: :source_code_management do
expect(recorder.count).to be_zero
end
end
context 'when author_email is nil' do
let(:git_commit) { RepoHelpers.sample_commit.tap { |c| c.author_email = nil } }
let(:commit) { described_class.new(git_commit, build(:project)) }
it 'returns nil' do
expect(commit.author).to be_nil
end
end
end
describe '#committer' do

View File

@ -0,0 +1,102 @@
# frozen_string_literal: true
require 'spec_helper'
# Model spec for Packages::Conan::PackageReference: associations, reference
# length/uniqueness validation, and conaninfo schema/size validation.
RSpec.describe Packages::Conan::PackageReference, type: :model, feature_category: :package_registry do
  describe 'associations' do
    it do
      is_expected.to belong_to(:package).class_name('Packages::Conan::Package').inverse_of(:conan_package_references)
    end

    it do
      is_expected.to belong_to(:recipe_revision).class_name('Packages::Conan::RecipeRevision')
        .inverse_of(:conan_package_references)
    end

    it { is_expected.to belong_to(:project) }
  end

  describe 'validations' do
    subject(:package_reference) { build(:conan_package_reference) }

    it { is_expected.to validate_presence_of(:package) }
    it { is_expected.to validate_presence_of(:project) }
    it { is_expected.to validate_presence_of(:reference) }

    it do
      # Case-insensitive: reference strings that differ only in case decode to
      # the same binary value, so they must be treated as duplicates.
      is_expected.to validate_uniqueness_of(:reference).scoped_to([:package_id,
        :recipe_revision_id]).case_insensitive
    end

    context 'on reference' do
      let(:invalid_reference) { 'a' * (Packages::Conan::PackageReference::REFERENCE_LENGTH_MAX + 1) }

      context 'when the length exceeds the maximum byte size' do
        it 'is not valid', :aggregate_failures do
          package_reference.reference = invalid_reference
          expect(package_reference).not_to be_valid
          expect(package_reference.errors[:reference]).to include(
            "is too long (#{Packages::Conan::PackageReference::REFERENCE_LENGTH_MAX + 1} B). " \
            "The maximum size is #{Packages::Conan::PackageReference::REFERENCE_LENGTH_MAX} B.")
        end
      end

      context 'when the length is within the byte size limit' do
        it 'is valid' do
          # package_reference is set correctly in the factory
          expect(package_reference).to be_valid
        end
      end
    end

    context 'on info' do
      subject(:package_reference) do
        pr = build(:conan_package_reference)
        # Only override the factory's info payload when a context defines one.
        pr.info = info if defined?(info)
        pr
      end

      it { is_expected.to be_valid }

      context 'with empty conan info' do
        let(:info) { {} }

        it { is_expected.to be_valid }
      end

      context 'with invalid conan info' do
        let(:info) { { invalid_field: 'some_value' } }

        it 'is invalid', :aggregate_failures do
          expect(package_reference).not_to be_valid
          expect(package_reference.errors[:info]).to include(
            'object at root is missing required properties: settings, requires, options')
        end
      end

      context 'when info size exceeds the maximum allowed size' do
        before do
          # Shrink the limit so the oversized payload below stays small.
          stub_const('Packages::Conan::PackageReference::MAX_INFO_SIZE', 1000)
        end

        let(:info) do
          {
            settings: { os: 'Linux', arch: 'x86_64' },
            requires: ['libA/1.0@user/testing'],
            options: { fPIC: true },
            otherProperties: 'a' * 1001 # Simulates large data
          }
        end

        it 'is invalid due to large size' do
          expect(package_reference).not_to be_valid
          expect(package_reference.errors[:info]).to include(
            'conaninfo is too large. Maximum size is 1000 characters'
          )
        end
      end
    end
  end
end

View File

@ -10,6 +10,11 @@ RSpec.describe Packages::Conan::Package, type: :model, feature_category: :packag
is_expected.to have_many(:conan_recipe_revisions).inverse_of(:package)
.class_name('Packages::Conan::RecipeRevision')
end
it do
is_expected.to have_many(:conan_package_references).inverse_of(:package)
.class_name('Packages::Conan::PackageReference')
end
end
describe 'validations' do

View File

@ -9,6 +9,11 @@ RSpec.describe Packages::Conan::RecipeRevision, type: :model, feature_category:
end
it { is_expected.to belong_to(:project) }
it do
is_expected.to have_many(:conan_package_references).inverse_of(:recipe_revision)
.class_name('Packages::Conan::PackageReference')
end
end
describe 'validations' do

View File

@ -48,6 +48,39 @@ RSpec.describe VirtualRegistries::Packages::Maven::CachedResponse, type: :model,
end
end
describe 'scopes' do
describe '.orphan' do
subject { described_class.orphan }
let_it_be(:cached_response) { create(:virtual_registries_packages_maven_cached_response) }
let_it_be(:orphan_cached_response) { create(:virtual_registries_packages_maven_cached_response, :orphan) }
it { is_expected.to contain_exactly(orphan_cached_response) }
end
describe '.pending_destruction' do
subject { described_class.pending_destruction }
let_it_be(:cached_response) { create(:virtual_registries_packages_maven_cached_response, :orphan, :processing) }
let_it_be(:pending_destruction_cached_response) do
create(:virtual_registries_packages_maven_cached_response, :orphan)
end
it { is_expected.to contain_exactly(pending_destruction_cached_response) }
end
end
describe '.next_pending_destruction' do
subject { described_class.next_pending_destruction }
let_it_be(:cached_response) { create(:virtual_registries_packages_maven_cached_response) }
let_it_be(:pending_destruction_cached_response) do
create(:virtual_registries_packages_maven_cached_response, :orphan)
end
it { is_expected.to eq(pending_destruction_cached_response) }
end
describe 'object storage key' do
it 'can not be null' do
cached_response.object_storage_key = nil

View File

@ -1008,7 +1008,7 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, fea
cached_response
.as_json
.merge('cached_response_id' => Base64.urlsafe_encode64(cached_response.relative_path))
.except('id', 'object_storage_key', 'file_store')
.except('id', 'object_storage_key', 'file_store', 'status')
)
end
end

View File

@ -11,9 +11,12 @@ RSpec.describe DependencyProxy::CleanupDependencyProxyWorker, feature_category:
it 'queues the cleanup jobs', :aggregate_failures do
create(:dependency_proxy_blob, :pending_destruction)
create(:dependency_proxy_manifest, :pending_destruction)
create(:virtual_registries_packages_maven_cached_response, :orphan)
expect(DependencyProxy::CleanupBlobWorker).to receive(:perform_with_capacity).twice
expect(DependencyProxy::CleanupManifestWorker).to receive(:perform_with_capacity).twice
expect(::VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker)
.to receive(:perform_with_capacity).twice
subject
end
@ -25,6 +28,8 @@ RSpec.describe DependencyProxy::CleanupDependencyProxyWorker, feature_category:
it 'does not queue the cleanup jobs', :aggregate_failures do
expect(DependencyProxy::CleanupBlobWorker).not_to receive(:perform_with_capacity)
expect(DependencyProxy::CleanupManifestWorker).not_to receive(:perform_with_capacity)
expect(::VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker)
.not_to receive(:perform_with_capacity)
subject
end

View File

@ -473,6 +473,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Vulnerabilities::Statistics::AdjustmentWorker' => 3,
'VulnerabilityExports::ExportDeletionWorker' => 3,
'VulnerabilityExports::ExportWorker' => 3,
'VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker' => 0,
'WaitForClusterCreationWorker' => 3,
'WebHookWorker' => 4,
'WebHooks::LogExecutionWorker' => 3,

View File

@ -0,0 +1,85 @@
# frozen_string_literal: true

require 'spec_helper'

# Specs for the limited-capacity worker that destroys orphan (pending
# destruction) cached responses for Maven virtual registries.
RSpec.describe VirtualRegistries::Packages::DestroyOrphanCachedResponsesWorker, type: :worker, feature_category: :virtual_registry do
  let(:worker) { described_class.new }
  # The model's class name is what gets passed as the job argument, so the
  # worker can locate the table to clean up.
  let(:model) { ::VirtualRegistries::Packages::Maven::CachedResponse }

  it_behaves_like 'an idempotent worker' do
    let(:job_args) { [model.name] }
  end

  it_behaves_like 'worker with data consistency', described_class, data_consistency: :sticky

  # NOTE(review): limited-capacity workers re-enqueue themselves, so
  # deduplication is presumably disabled to avoid dropping follow-up jobs —
  # the spec only pins the :none strategy, not the rationale.
  it 'has a none deduplicate strategy' do
    expect(described_class.get_deduplicate_strategy).to eq(:none)
  end

  describe '#perform_work' do
    subject(:perform_work) { worker.perform_work(model.name) }

    context 'with no work to do' do
      # With no pending-destruction records, the worker exits early and
      # returns nil.
      it { is_expected.to be_nil }
    end

    context 'with work to do' do
      let_it_be(:cached_response) { create(:virtual_registries_packages_maven_cached_response) }
      let_it_be(:orphan_cached_response) { create(:virtual_registries_packages_maven_cached_response, :orphan) }

      it 'destroys orphan cached responses' do
        # The worker logs identifying metadata about the destroyed record.
        expect(worker).to receive(:log_extra_metadata_on_done).with(:cached_response_id, orphan_cached_response.id)
        expect(worker).to receive(:log_extra_metadata_on_done).with(:group_id, orphan_cached_response.group_id)
        expect(worker).to receive(:log_extra_metadata_on_done).with(:relative_path,
          orphan_cached_response.relative_path)
        # Records are picked via the model's next_pending_destruction scope.
        expect(model).to receive(:next_pending_destruction).and_call_original

        # Only the orphan record is removed; the regular one survives.
        expect { perform_work }.to change { model.count }.by(-1)
        expect { orphan_cached_response.reset }.to raise_error(ActiveRecord::RecordNotFound)
      end

      context 'with an error during deletion' do
        before do
          allow_next_found_instance_of(model) do |instance|
            allow(instance).to receive(:destroy).and_raise(StandardError)
          end
        end

        it 'tracks the error' do
          expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
            instance_of(StandardError), class: described_class.name
          )
          # A failed destroy is expected to move the record into the error
          # state (model.error scope) instead of leaving it untouched.
          expect { perform_work }.to change { model.error.count }.by(1)
        end
      end

      context 'when trying to update a destroyed record' do
        before do
          allow_next_found_instance_of(model) do |instance|
            # Let the real destroy! succeed first, then raise — simulating a
            # failure that happens after the row is already gone.
            destroy_method = instance.method(:destroy!)
            allow(instance).to receive(:destroy!) do
              destroy_method.call
              raise StandardError
            end
          end
        end

        it 'does not change the status to error' do
          expect(Gitlab::ErrorTracking).to receive(:log_exception)
            .with(instance_of(StandardError), class: described_class.name)
          # The already-destroyed record must not be re-saved with an error
          # status, so the error scope count stays the same.
          expect { perform_work }.not_to change { model.error.count }
        end
      end
    end
  end

  describe '#max_running_jobs' do
    let(:capacity) { described_class::MAX_CAPACITY }

    subject { worker.max_running_jobs }

    # Capacity is pinned to the worker's MAX_CAPACITY constant.
    it { is_expected.to eq(capacity) }
  end
end

View File

@ -1362,10 +1362,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.117.0.tgz#a9a45949d73e91f278e019b46220feb63105dd48"
integrity sha512-nBFWh2UN+pFl7nBQUgaUtHRChrZSXdRNmv1J49QPLwyJYWuq51YEBXQW5mPAlvB1BGfiNJPSHSGxWALFZBI5WA==
"@gitlab/ui@94.0.0":
version "94.0.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-94.0.0.tgz#6c17bcb37a9fbc521099a65eee8a52aba45e3163"
integrity sha512-qO3QaQa8nbsTM1sCwlz6QKYXYQ1DntFWj9GcmlxVolDL2Ctkl9tAf8VLooPT0fqwUr6CYsim+N/jibBlOt6xnQ==
"@gitlab/ui@94.0.1":
version "94.0.1"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-94.0.1.tgz#4be575ab6e6583ed65ac7c78e70d827706367639"
integrity sha512-6RD78bQ7Bff1qU3k7E8ZU/4YQ8PD68aRaE7eHojKBtAfR9ClGvPeFMFkTFAdnfV5xjBOE6tKMzR0pOhZE3V6cQ==
dependencies:
"@floating-ui/dom" "1.4.3"
echarts "^5.3.2"