Add latest changes from gitlab-org/gitlab@master

parent eca41b94ac
commit f168fb05f0

@@ -13,7 +13,6 @@ Gitlab/FeatureFlagWithoutActor:
- 'app/controllers/graphql_controller.rb'
- 'app/controllers/groups/settings/slacks_controller.rb'
- 'app/controllers/groups_controller.rb'
- 'app/controllers/jwks_controller.rb'
- 'app/controllers/omniauth_callbacks_controller.rb'
- 'app/controllers/profiles/two_factor_auths_controller.rb'
- 'app/controllers/projects/settings/integrations_controller.rb'

@@ -37,7 +36,6 @@ Gitlab/FeatureFlagWithoutActor:
- 'app/helpers/projects/topics_helper.rb'
- 'app/helpers/projects_helper.rb'
- 'app/helpers/routing/pseudonymization_helper.rb'
- 'app/models/ci/build.rb'
- 'app/models/ci/pipeline.rb'
- 'app/models/ci/runner.rb'
- 'app/models/ci/secure_file.rb'

@@ -66,7 +64,6 @@ Gitlab/FeatureFlagWithoutActor:
- 'app/services/ci/job_artifacts/update_unknown_locked_status_service.rb'
- 'app/services/ci/queue/pending_builds_strategy.rb'
- 'app/services/ci/register_job_service.rb'
- 'app/services/ci/runners/register_runner_service.rb'
- 'app/services/ci/stuck_builds/drop_running_service.rb'
- 'app/services/click_house/sync_strategies/audit_event_sync_strategy.rb'
- 'app/services/concerns/measurable.rb'

@@ -75,7 +72,6 @@ Gitlab/FeatureFlagWithoutActor:
- 'app/services/integrations/propagation/bulk_update_service.rb'
- 'app/services/projects/participants_service.rb'
- 'app/services/user_project_access_changed_service.rb'
- 'app/services/users/activity_service.rb'
- 'app/views/admin/application_settings/_email.html.haml'
- 'app/views/admin/application_settings/_invitation_flow_enforcement.html.haml'
- 'app/views/admin/application_settings/general.html.haml'

@@ -122,7 +118,6 @@ Gitlab/FeatureFlagWithoutActor:
- 'app/workers/pipeline_schedule_worker.rb'
- 'app/workers/projects/refresh_build_artifacts_size_statistics_worker.rb'
- 'app/workers/prune_old_events_worker.rb'
- 'app/workers/stuck_ci_jobs_worker.rb'
- 'config/initializers/active_record_transaction_observer.rb'
- 'config/initializers/carrierwave_s3_encryption_headers_patch.rb'
- 'ee/app/controllers/ee/admin/application_settings_controller.rb'

@@ -141,7 +136,6 @@ Gitlab/FeatureFlagWithoutActor:
- 'ee/app/models/concerns/geo/replicable_model.rb'
- 'ee/app/models/ee/application_setting.rb'
- 'ee/app/models/ee/integration.rb'
- 'ee/app/models/ee/member.rb'
- 'ee/app/models/ee/user.rb'
- 'ee/app/models/integrations/git_guardian.rb'
- 'ee/app/models/members/member_role.rb'

@@ -155,7 +149,6 @@ Gitlab/FeatureFlagWithoutActor:
- 'ee/app/presenters/ee/project_presenter.rb'
- 'ee/app/services/ee/git/branch_push_service.rb'
- 'ee/app/services/ee/groups/transfer_service.rb'
- 'ee/app/services/ee/members/update_service.rb'
- 'ee/app/services/ee/notification_service.rb'
- 'ee/app/services/ee/users/build_service.rb'
- 'ee/app/services/epics/strategies/base_dates_strategy.rb'

@@ -207,7 +200,6 @@ Gitlab/FeatureFlagWithoutActor:
- 'ee/lib/api/epics.rb'
- 'ee/lib/api/internal/search/zoekt.rb'
- 'ee/lib/api/internal/suggested_reviewers.rb'
- 'ee/lib/api/protected_environments.rb'
- 'ee/lib/ee/api/entities/application_setting.rb'
- 'ee/lib/ee/api/geo.rb'
- 'ee/lib/ee/api/internal/base.rb'

@@ -267,6 +259,7 @@ Gitlab/FeatureFlagWithoutActor:
- 'lib/gitlab/database/health_status/indicators/autovacuum_active_on_table.rb'
- 'lib/gitlab/database/health_status/indicators/patroni_apdex.rb'
- 'lib/gitlab/database/health_status/indicators/wal_rate.rb'
- 'lib/gitlab/database/health_status/indicators/wal_receiver_saturation.rb'
- 'lib/gitlab/database/health_status/indicators/write_ahead_log.rb'
- 'lib/gitlab/database/load_balancing/host.rb'
- 'lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables.rb'

@@ -18,7 +18,14 @@ export default {
  directives: {
    GlTooltip: GlTooltipDirective,
  },
  inject: ['blobHash'],
  inject: {
    blobHash: {
      default: '',
    },
    canDownloadCode: {
      default: true,
    },
  },
  props: {
    rawPath: {
      type: String,

@@ -123,11 +130,12 @@ export default {
      variant="default"
    />
    <gl-button
      v-if="!isEmpty"
      v-if="!isEmpty && canDownloadCode"
      v-gl-tooltip.hover
      :aria-label="$options.BTN_DOWNLOAD_TITLE"
      :title="$options.BTN_DOWNLOAD_TITLE"
      :href="downloadUrl"
      data-testid="download-button"
      target="_blank"
      icon="download"
      category="primary"

@@ -7,6 +7,7 @@ import {
  GlLink,
  GlLoadingIcon,
  GlTable,
  GlForm,
  GlFormInput,
  GlDropdown,
  GlDropdownItem,

@@ -100,6 +101,7 @@ export default {
    GlLink,
    GlLoadingIcon,
    GlTable,
    GlForm,
    GlFormInput,
    GlSprintf,
    GlPagination,

@@ -343,14 +345,15 @@ export default {
      <div v-else class="gl-px-5">{{ __("You don't have any recent searches") }}</div>
    </gl-dropdown>
    <div class="filtered-search-input-container gl-flex-grow-1">
      <gl-form-input
        v-model="errorSearchQuery"
        class="gl-pl-3! filtered-search"
        :disabled="loading"
        :placeholder="__('Search or filter results…')"
        autofocus
        @keyup.enter.native="searchByQuery(errorSearchQuery)"
      />
      <gl-form @submit.prevent="searchByQuery(errorSearchQuery)">
        <gl-form-input
          v-model="errorSearchQuery"
          class="gl-pl-3! filtered-search"
          :disabled="loading"
          :placeholder="__('Search or filter results…')"
          autofocus
        />
      </gl-form>
    </div>
    <div class="gl-search-box-by-type-right-icons">
      <gl-button

@@ -613,6 +613,372 @@ export async function fetchLogs(logsSearchUrl, { pageToken, pageSize, filters =
  }
}

export async function fetchLogsSearchMetadata(_logsSearchMetadataUrl, { filters = {} }) {
  try {
    const params = new URLSearchParams();

    const { dateRange, attributes } = filters;
    if (dateRange) {
      addDateRangeFilterToQueryParams(dateRange, params);
    }

    if (attributes) {
      addLogsAttributesFiltersToQueryParams(attributes, params);
    }

    // TODO remove mocks (and add UTs) when API is ready https://gitlab.com/gitlab-org/opstrace/opstrace/-/issues/2782
    // const { data } = await axios.get(logsSearchMetadataUrl, {
    //   withCredentials: true,
    //   params,
    // });
    // return data;

    return {
      start_ts: 1713513680617331200,
      end_ts: 1714723280617331200,
      summary: {
        service_names: ['adservice', 'cartservice', 'quoteservice', 'recommendationservice'],
        trace_flags: [0, 1],
        severity_names: ['info', 'warn'],
        severity_numbers: [9, 13],
      },
      severity_numbers_counts: [
        { time: 1713519360000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713545280000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713571200000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713597120000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713623040000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713648960000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713674880000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713700800000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713726720000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713752640000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713778560000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713804480000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713830400000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713856320000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713882240000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713908160000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713934080000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713960000000000000, counts: { 13: 0, 9: 0 } },
        { time: 1713985920000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714011840000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714037760000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714063680000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714089600000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714115520000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714141440000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714167360000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714193280000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714219200000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714245120000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714271040000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714296960000000000, counts: { 13: 0, 9: 0 } },
        { time: 1714322880000000000, counts: { 13: 1, 9: 26202 } },
        { time: 1714348800000000000, counts: { 13: 0, 9: 53103 } },
        { time: 1714374720000000000, counts: { 13: 0, 9: 52854 } },
        { time: 1714400640000000000, counts: { 13: 0, 9: 49598 } },
        { time: 1714426560000000000, counts: { 13: 0, 9: 45266 } },
        { time: 1714452480000000000, counts: { 13: 0, 9: 44951 } },
        { time: 1714478400000000000, counts: { 13: 0, 9: 45096 } },
        { time: 1714504320000000000, counts: { 13: 0, 9: 45301 } },
        { time: 1714530240000000000, counts: { 13: 0, 9: 44894 } },
        { time: 1714556160000000000, counts: { 13: 0, 9: 45444 } },
        { time: 1714582080000000000, counts: { 13: 0, 9: 45067 } },
        { time: 1714608000000000000, counts: { 13: 0, 9: 45119 } },
        { time: 1714633920000000000, counts: { 13: 0, 9: 45817 } },
        { time: 1714659840000000000, counts: { 13: 0, 9: 44574 } },
        { time: 1714685760000000000, counts: { 13: 0, 9: 44652 } },
        { time: 1714711680000000000, counts: { 13: 0, 9: 20470 } },
      ],
    };
  } catch (e) {
    return reportErrorAndThrow(e);
  }
}

/** ****
 *
 * ObservabilityClient

@@ -634,6 +1000,7 @@ export function buildClient(config) {
    metricsSearchUrl,
    metricsSearchMetadataUrl,
    logsSearchUrl,
    logsSearchMetadataUrl,
  } = config;

  if (typeof provisioningUrl !== 'string') {

@@ -672,6 +1039,10 @@ export function buildClient(config) {
    throw new Error('logsSearchUrl param must be a string');
  }

  if (typeof logsSearchMetadataUrl !== 'string') {
    throw new Error('logsSearchMetadataUrl param must be a string');
  }

  return {
    enableObservability: () => enableObservability(provisioningUrl),
    isObservabilityEnabled: () => isObservabilityEnabled(provisioningUrl),

@@ -686,5 +1057,6 @@ export function buildClient(config) {
    fetchMetricSearchMetadata: (metricName, metricType) =>
      fetchMetricSearchMetadata(metricsSearchMetadataUrl, metricName, metricType),
    fetchLogs: (options) => fetchLogs(logsSearchUrl, options),
    fetchLogsSearchMetadata: (options) => fetchLogsSearchMetadata(logsSearchMetadataUrl, options),
  };
}

@@ -104,7 +104,7 @@ export default {
      if (filter.type === TOKEN_TYPE_STATUS) {
        return {
          ...acc,
          packageStatus: filter.value.data,
          packageStatus: filter.value.data.toUpperCase(),
        };
      }

@@ -1,6 +1,7 @@
<script>
import { GlAlert } from '@gitlab/ui';
import { GlAlert, GlButton } from '@gitlab/ui';
import { s__, sprintf, n__ } from '~/locale';
import { mergeUrlParams } from '~/lib/utils/url_utility';
import PackagesListRow from '~/packages_and_registries/package_registry/components/list/package_list_row.vue';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';

@@ -30,6 +31,7 @@ export default {
  name: 'PackagesList',
  components: {
    GlAlert,
    GlButton,
    DeleteModal,
    PackagesListLoader,
    PackagesListRow,

@@ -43,6 +45,11 @@ export default {
      required: false,
      default: () => [],
    },
    hideErrorAlert: {
      type: Boolean,
      required: false,
      default: false,
    },
    isLoading: {
      type: Boolean,
      required: false,

@@ -84,19 +91,49 @@ export default {
      };
    },
    errorTitleAlert() {
      return sprintf(
        s__('PackageRegistry|There was an error publishing a %{packageName} package'),
        { packageName: this.errorPackages[0].name },
      );
      if (this.singleErrorPackage) {
        return sprintf(
          s__('PackageRegistry|There was an error publishing a %{packageName} package'),
          { packageName: this.singleErrorPackage.name },
        );
      }
      return sprintf(s__('PackageRegistry|There was an error publishing %{count} packages'), {
        count: this.errorPackages.length,
      });
    },
    errorMessageBodyAlert() {
      if (this.errorPackages[0]?.statusMessage) {
        return this.errorPackages[0].statusMessage;
      if (this.singleErrorPackage) {
        return this.singleErrorPackage.statusMessage || this.$options.i18n.errorMessageBodyAlert;
      }
      return this.$options.i18n.errorMessageBodyAlert;

      return sprintf(
        s__(
          'PackageRegistry|%{count} packages were not published to the registry. Remove these packages and try again.',
        ),
        {
          count: this.errorPackages.length,
        },
      );
    },
    singleErrorPackage() {
      if (this.errorPackages.length === 1) {
        const [errorPackage] = this.errorPackages;
        return errorPackage;
      }

      return null;
    },
    showErrorPackageAlert() {
      return this.errorPackages.length > 0;
      return this.errorPackages.length > 0 && !this.hideErrorAlert;
    },
    errorPackagesHref() {
      // For reactivity we depend on showErrorPackageAlert so we update accordingly
      if (!this.showErrorPackageAlert) {
        return '';
      }

      const pageParams = { after: null, before: null };
      return mergeUrlParams({ status: 'error', ...pageParams }, window.location.href);
    },
    packageTypesWithForwardingEnabled() {
      return Object.keys(this.groupSettings)

@@ -147,7 +184,7 @@ export default {
      this.itemsToBeDeleted = [];
    },
    showConfirmationModal() {
      this.setItemsToBeDeleted([this.errorPackages[0]]);
      this.setItemsToBeDeleted([this.singleErrorPackage]);
    },
  },
  i18n: {

@@ -173,10 +210,17 @@ export default {
      class="gl-mt-5"
      variant="danger"
      :title="errorTitleAlert"
      :primary-button-text="$options.i18n.deleteThisPackage"
      @primaryAction="showConfirmationModal"
    >{{ errorMessageBodyAlert }}</gl-alert
    >
      {{ errorMessageBodyAlert }}
      <template #actions>
        <gl-button v-if="singleErrorPackage" variant="confirm" @click="showConfirmationModal">{{
          $options.i18n.deleteThisPackage
        }}</gl-button>
        <gl-button v-else :href="errorPackagesHref" variant="confirm">{{
          s__('PackageRegistry|Show packages with errors')
        }}</gl-button>
      </template>
    </gl-alert>
    <registry-list
      data-testid="packages-table"
      :hidden-delete="!canDeletePackages"

@@ -222,13 +222,13 @@ export const PACKAGE_TYPES_OPTIONS = [

export const PACKAGE_STATUS_OPTIONS = [
  {
    value: PACKAGE_DEFAULT_STATUS,
    value: PACKAGE_DEFAULT_STATUS.toLowerCase(),
    title: s__('PackageRegistry|Default'),
  },
  { value: PACKAGE_ERROR_STATUS, title: s__('PackageRegistry|Error') },
  { value: 'HIDDEN', title: s__('PackageRegistry|Hidden') },
  { value: 'PENDING_DESTRUCTION', title: s__('PackageRegistry|Pending deletion') },
  { value: 'PROCESSING', title: s__('PackageRegistry|Processing') },
  { value: PACKAGE_ERROR_STATUS.toLowerCase(), title: s__('PackageRegistry|Error') },
  { value: 'hidden', title: s__('PackageRegistry|Hidden') },
  { value: 'pending_destruction', title: s__('PackageRegistry|Pending deletion') },
  { value: 'processing', title: s__('PackageRegistry|Processing') },
];

// links

@@ -11,6 +11,8 @@ import {
  GRAPHQL_PAGE_SIZE,
  DELETE_PACKAGE_SUCCESS_MESSAGE,
  EMPTY_LIST_HELP_URL,
  PACKAGE_ERROR_STATUS,
  PACKAGE_HELP_URL,
} from '~/packages_and_registries/package_registry/constants';
import getPackagesQuery from '~/packages_and_registries/package_registry/graphql/queries/get_packages.query.graphql';

@@ -126,6 +127,9 @@ export default {
    isLoading() {
      return this.$apollo.queries.packagesResource.loading || this.isDeleteInProgress;
    },
    isFilteredByErrorStatus() {
      return this.filters?.packageStatus?.toUpperCase() === PACKAGE_ERROR_STATUS;
    },
    refetchQueriesData() {
      return [
        {

@@ -198,6 +202,7 @@ export default {
    >
      <template #default="{ deletePackages }">
        <package-list
          :hide-error-alert="isFilteredByErrorStatus"
          :group-settings="groupSettings"
          :list="packages.nodes"
          :is-loading="isLoading"

@@ -224,7 +224,7 @@ export default {
      {
        key: 'rowActions',
        label: __('Actions'),
        thClass: 'gl-display-none',
        thClass: 'gl-text-right',
        tdClass: '!gl-align-middle gl-text-right',
      },
    ],

@@ -10,6 +10,7 @@ import {
  GlModalDirective,
  GlTooltipDirective,
  GlFormSelect,
  GlSprintf,
} from '@gitlab/ui';
import packagesProtectionRuleQuery from '~/packages_and_registries/settings/project/graphql/queries/get_packages_protection_rules.query.graphql';
import { getPackageTypeLabel } from '~/packages_and_registries/package_registry/utils';

@@ -37,6 +38,7 @@ export default {
    GlKeysetPagination,
    GlModal,
    GlFormSelect,
    GlSprintf,
  },
  directives: {
    GlModal: GlModalDirective,

@@ -49,9 +51,12 @@ export default {
      'PackageRegistry|When a package is protected then only certain user roles are able to update and delete the protected package. This helps to avoid tampering with the package.',
    ),
    protectionRuleDeletionConfirmModal: {
      title: s__('PackageRegistry|Are you sure you want to delete the package protection rule?'),
      description: s__(
        'PackageRegistry|Users with at least the Developer role for this project will be able to publish, edit, and delete packages.',
      title: s__('PackageRegistry|Delete package protection rule?'),
      descriptionWarning: s__(
        'PackageRegistry|You are about to delete the package protection rule for %{packageNamePattern}.',
      ),
      descriptionConsequence: s__(
        'PackageRegistry|Users with at least the Developer role for this project will be able to publish, edit, and delete packages with this package name.',
      ),
    },
    pushProtectedUpToAccessLevel: I18N_PUSH_PROTECTED_UP_TO_ACCESS_LEVEL,

@@ -92,7 +97,7 @@ export default {
    },
    modalActionPrimary() {
      return {
        text: __('Delete'),
        text: s__('PackageRegistry|Delete package protection rule'),
        attributes: {
          variant: 'danger',
        },

@@ -246,8 +251,8 @@ export default {
      },
      {
        key: 'col_4_actions',
        label: '',
        thClass: 'gl-display-none',
        label: __('Actions'),
        thClass: 'gl-text-right',
        tdClass: '!gl-align-middle gl-text-right',
      },
    ],

@@ -328,10 +333,10 @@ export default {
          <gl-button
            v-gl-tooltip
            v-gl-modal="$options.modal.id"
            category="secondary"
            variant="danger"
            category="tertiary"
            icon="remove"
            :title="s__('PackageRegistry|Delete rule')"
            :title="__('Delete')"
            :aria-label="__('Delete')"
            :disabled="isProtectionRuleDeleteButtonDisabled(item)"
            @click="showProtectionRuleDeletionConfirmModal(item)"
          />

@@ -350,6 +355,7 @@ export default {
    </gl-card>

    <gl-modal
      v-if="protectionRuleMutationItem"
      :modal-id="$options.modal.id"
      size="sm"
      :title="$options.i18n.protectionRuleDeletionConfirmModal.title"

@@ -357,7 +363,16 @@ export default {
      :action-cancel="modalActionCancel"
      @primary="deleteProtectionRule(protectionRuleMutationItem)"
    >
      <p>{{ $options.i18n.protectionRuleDeletionConfirmModal.description }}</p>
      <p>
        <gl-sprintf
          :message="$options.i18n.protectionRuleDeletionConfirmModal.descriptionWarning"
        >
          <template #packageNamePattern>
            <strong>{{ protectionRuleMutationItem.col_1_package_name_pattern }}</strong>
          </template>
        </gl-sprintf>
      </p>
      <p>{{ $options.i18n.protectionRuleDeletionConfirmModal.descriptionConsequence }}</p>
    </gl-modal>
  </template>
</settings-block>

@@ -78,6 +78,7 @@ if (viewBlobEl) {
    userId,
    explainCodeAvailable,
    refType,
    canDownloadCode,
    ...dataset
  } = viewBlobEl.dataset;

@@ -94,6 +95,7 @@ if (viewBlobEl) {
      resourceId,
      userId,
      explainCodeAvailable: parseBoolean(explainCodeAvailable),
      canDownloadCode: parseBoolean(canDownloadCode),
      ...provideWebIdeLink(dataset),
    },
    render(createElement) {

@@ -71,6 +71,10 @@
      }
    }
  }

  &:last-of-type {
    border-bottom-left-radius: $border-radius-default-inner;
  }
}

pre .line,

@@ -11,6 +11,8 @@ module ApplicationCable

    def connect
      self.current_user = find_user_from_bearer_token || find_user_from_session_store
    rescue Gitlab::Auth::UnauthorizedError
      reject_unauthorized_connection
    end

    private

@@ -0,0 +1,35 @@
# frozen_string_literal: true

module Mutations
  module Ml
    module ModelVersions
      class Delete < BaseMutation
        graphql_name 'MlModelVersionDelete'

        authorize :write_model_registry

        argument :id, ::Types::GlobalIDType[::Ml::ModelVersion],
          required: true,
          description: 'Global ID of the model version to be deleted.'

        field :model_version, ::Types::Ml::ModelVersionType,
          description: 'Deleted model version.', null: true

        def resolve(**args)
          model_version = ::Ml::ModelVersion.find_by_id(args[:id].model_id)

          return { errors: [_('Model version not found')] } unless model_version

          authorize!(model_version.project)

          result = ::Ml::DestroyModelVersionService.new(model_version, current_user).execute

          {
            model_version: result.payload[:model_version],
            errors: result.errors
          }
        end
      end
    end
  end
end

@@ -208,6 +208,7 @@ module Types
    mount_mutation Mutations::Admin::AbuseReportLabels::Create, alpha: { milestone: '16.4' }
    mount_mutation Mutations::Ml::Models::Create, alpha: { milestone: '16.8' }
    mount_mutation Mutations::Ml::Models::Destroy, alpha: { milestone: '16.10' }
    mount_mutation Mutations::Ml::ModelVersions::Delete, alpha: { milestone: '17.0' }
    mount_mutation Mutations::BranchRules::Delete, alpha: { milestone: '16.9' }
  end
end

@@ -297,7 +297,8 @@ module BlobHelper
      resource_id: project.to_global_id,
      user_id: current_user.present? ? current_user.to_global_id : '',
      target_branch: project.empty_repo? ? ref : @ref,
      original_branch: @ref
      original_branch: @ref,
      can_download_code: can?(current_user, :download_code, project).to_s
    }
  end
end

@@ -0,0 +1,34 @@
# frozen_string_literal: true

module Ml
  class DestroyModelVersionService
    def initialize(model_version, user)
      @model_version = model_version
      @user = user
    end

    def execute
      if model_version.package.present?
        result = ::Packages::MarkPackageForDestructionService
          .new(container: model_version.package, current_user: @user)
          .execute

        return ServiceResponse.error(message: result.message, payload: payload) unless result.success?
      end

      if model_version.destroy
        ServiceResponse.success(payload: payload)
      else
        ServiceResponse.error(message: model_version.errors.full_messages, payload: payload)
      end
    end

    private

    def payload
      { model_version: model_version }
    end

    attr_reader :model_version, :user
  end
end

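For orientation, the new service is invoked the same way the GraphQL mutation above calls it. A minimal usage sketch (the variable names are illustrative):

```ruby
# Destroy a model version and inspect the ServiceResponse result.
result = ::Ml::DestroyModelVersionService.new(model_version, current_user).execute

if result.success?
  deleted_version = result.payload[:model_version] # the destroyed Ml::ModelVersion
else
  messages = result.errors # error messages collected by the service
end
```
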
@@ -1,32 +0,0 @@
# frozen_string_literal: true

module Ml
  module ModelVersions
    class DeleteService
      def initialize(project, name, version, user)
        @project = project
        @name = name
        @version = version
        @user = user
      end

      def execute
        model_version = Ml::ModelVersion
          .by_project_id_name_and_version(@project.id, @name, @version)
        return ServiceResponse.error(message: 'Model not found') unless model_version

        if model_version.package.present?
          result = ::Packages::MarkPackageForDestructionService
            .new(container: model_version.package, current_user: @user)
            .execute

          return ServiceResponse.error(message: result.message) unless result.success?
        end

        return ServiceResponse.error(message: 'Could not destroy the model version') unless model_version.destroy

        ServiceResponse.success
      end
    end
  end
end

@@ -2568,6 +2568,15 @@
  :weight: 1
  :idempotent: false
  :tags: []
- :name: bitbucket_server_import_import_pull_request_note
  :worker_name: Gitlab::BitbucketServerImport::ImportPullRequestNoteWorker
  :feature_category: :importers
  :has_external_dependencies: true
  :urgency: :low
  :resource_boundary: :unknown
  :weight: 1
  :idempotent: false
  :tags: []
- :name: bitbucket_server_import_import_pull_request_notes
  :worker_name: Gitlab::BitbucketServerImport::ImportPullRequestNotesWorker
  :feature_category: :importers

@@ -0,0 +1,13 @@
# frozen_string_literal: true

module Gitlab
  module BitbucketServerImport
    class ImportPullRequestNoteWorker # rubocop:disable Scalability/IdempotentWorker -- The worker should not run multiple times to avoid creating multiple imports
      include ObjectImporter

      def importer_class
        Importers::PullRequestNoteImporter
      end
    end
  end
end

@@ -121,6 +121,8 @@
    - 1
  - - bitbucket_server_import_import_pull_request
    - 1
  - - bitbucket_server_import_import_pull_request_note
    - 1
  - - bitbucket_server_import_import_pull_request_notes
    - 1
  - - bitbucket_server_import_stage_finish_import

@@ -0,0 +1,19 @@
# frozen_string_literal: true

class DropIndexAbuseReportsOnUserId < Gitlab::Database::Migration[2.2]
  milestone '17.0'

  disable_ddl_transaction!

  TABLE_NAME = :abuse_reports
  INDEX_NAME = :index_abuse_reports_on_user_id
  COLUMN_NAMES = [:user_id]

  def up
    remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME)
  end

  def down
    add_concurrent_index(TABLE_NAME, COLUMN_NAMES, name: INDEX_NAME)
  end
end

@@ -0,0 +1 @@
19922230b7ea54c41e5368bea5c680db08f5ef46f5df822f2be0471d60822974

@@ -24248,8 +24248,6 @@ CREATE INDEX index_abuse_reports_on_status_category_and_id ON abuse_reports USIN

CREATE INDEX index_abuse_reports_on_status_reporter_id_and_id ON abuse_reports USING btree (status, reporter_id, id);

CREATE INDEX index_abuse_reports_on_user_id ON abuse_reports USING btree (user_id);

CREATE INDEX index_abuse_trust_scores_on_user_id_and_source_and_created_at ON abuse_trust_scores USING btree (user_id, source, created_at);

CREATE UNIQUE INDEX "index_achievements_on_namespace_id_LOWER_name" ON achievements USING btree (namespace_id, lower(name));

@@ -54,6 +54,8 @@ For push and tag events, the same structure and deprecations are followed as [pr

## Create a system hook

> - **Name** and **Description** [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141977) in GitLab 16.9.

To create a system hook:

1. On the left sidebar, at the bottom, select **Admin Area**.

@@ -4,12 +4,17 @@ group: Product Planning
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---

# Epics API
# Epics API (deprecated)

DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated

WARNING:
The Epics REST API was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/460668) in GitLab 17.0
and is planned for removal in v5 of the API. Use the [Work Items API](../architecture/blueprints/work_items/index.md) instead.
This change is a breaking change.

Every API call to epics must be authenticated.

If a user is not a member of a private group, a `GET` request on that group results in a `404` status code.

@@ -6506,6 +6506,29 @@ Input type: `MlModelDestroyInput`
| <a id="mutationmlmodeldestroymessage"></a>`message` | [`String`](#string) | Model deletion result message. |
| <a id="mutationmlmodeldestroymodel"></a>`model` | [`MlModel`](#mlmodel) | Model after mutation. |

### `Mutation.mlModelVersionDelete`

DETAILS:
**Introduced** in GitLab 17.0.
**Status**: Experiment.

Input type: `MlModelVersionDeleteInput`

#### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationmlmodelversiondeleteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationmlmodelversiondeleteid"></a>`id` | [`MlModelVersionID!`](#mlmodelversionid) | Global ID of the model version to be deleted. |

#### Fields

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationmlmodelversiondeleteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationmlmodelversiondeleteerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationmlmodelversiondeletemodelversion"></a>`modelVersion` | [`MlModelVersion`](#mlmodelversion) | Deleted model version. |
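For illustration, a minimal call to this mutation might look like the following sketch; the global ID value is a placeholder:

```graphql
mutation {
  mlModelVersionDelete(input: { id: "gid://gitlab/Ml::ModelVersion/1" }) {
    modelVersion {
      id
    }
    errors
  }
}
```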

### `Mutation.namespaceBanDestroy`

Input type: `NamespaceBanDestroyInput`

@@ -224,7 +224,9 @@ curl --request POST \

## Import repository from Bitbucket Cloud

Import your projects from Bitbucket Cloud to GitLab using the API.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/215036) in GitLab 17.0.

Import your projects from Bitbucket Cloud to GitLab using the API.

Prerequisites:

@@ -9,20 +9,22 @@ owning-stage: ""

# Defining bounded contexts

## Status quo
## Historical context

Today the GitLab codebase doesn't have a clear domain structure.
Until May 2024 the GitLab codebase didn't have a clear domain structure.
We have [forced the creation of some modules](https://gitlab.com/gitlab-org/gitlab/-/issues/212156)
as a first step but we don't have a well defined strategy for doing it consistently.
as a first step but we didn't have a well defined strategy for doing it consistently.

The majority of the code is not properly namespaced and organized:
The majority of the code was not properly namespaced and organized:

- Ruby namespaces used don't always represent the SSoT. We have overlapping concepts spread across multiple
- Ruby namespaces used didn't always represent the SSoT. We had overlapping concepts spread across multiple
  namespaces. For example: `Abuse::` and `Spam::` or `Security::Orchestration::` and `Security::SecurityOrchestration`.
- Domain code related to the same bounded context is scattered across multiple directories.
- Domain code is present in `lib/` directory under namespaces that differ from the same domain under `app/`.
- Some namespaces are very shallow, containing a few classes while other namespaces are very deep and large.
- A lot of the old code is not namespaced, making it difficult to understand the context where it's used.
- Domain code related to the same bounded context was scattered across multiple directories.
- Domain code was present in `lib/` directory under namespaces that differed from the same domain under `app/`.
- Some namespaces were very shallow, containing a few classes while other namespaces were very deep and large.
- A lot of the old code was not namespaced, making it difficult to understand the context where it was used.

In May 2024 we [defined and enforced bounded contexts](decisions/002_bounded_contexts_definition.md).

## Goal

@@ -36,75 +38,15 @@ The majority of the code is not properly namespaced and organized:

## Iterations

### 0. Extract libraries out of the codebase
1. [Extract libraries out of the `lib/` directory](https://gitlab.com/gitlab-org/gitlab/-/blob/4c6e120069abe751d3128c05ade45ea749a033df/doc/development/gems.md).
   - This step is non blocking to modularization but the less generic code exists in `lib/`, the
     easier it will be to identify and separate bounded contexts.
   - Isolate code that could live in a separate project, to prevent it from depending on domain code.

In June 2023 we started extracting gems out of the main codebase, into
[`gems/` directory inside the monorepo](https://gitlab.com/gitlab-org/gitlab/-/blob/4c6e120069abe751d3128c05ade45ea749a033df/doc/development/gems.md).

This is our first step towards modularization.

- We want to separate generic code from domain code (that powers the business logic).
- We want to clean up the `lib/` directory, removing generic code from it.
- We want to isolate code that could live in a separate project, to prevent it from depending on domain code.

These gems are still part of the monorepo but could be extracted into dedicated repositories if needed.

Extraction of gems is non blocking to modularization, but the less generic code exists in `lib/`, the
easier it will be to identify and separate bounded contexts.

### 1. What makes a bounded context?

From the research in [Proposal: split GitLab monolith into components](https://gitlab.com/gitlab-org/gitlab/-/issues/365293)
it seems that following [product categories](https://handbook.gitlab.com/handbook/product/categories/#hierarchy), as a guideline,
would be much better than translating organization structure into folder structure (for example, `app/modules/verify/pipeline-execution/...`).

However, this guideline alone is not sufficient and we need a more specific strategy:

- Product categories can change ownership and we have seen some pretty frequent changes, even back and forth.
  Moving code every time a product category changes ownership adds too much maintenance overhead.
- Teams and organization changes should just mean relabelling the ownership of specific modules.
- Bounded contexts (top level modules) should be [sufficiently deep](../../../development/software_design.md#use-namespaces-to-define-bounded-contexts)
  to encapsulate implementation details and provide a smaller interface.
- Some product categories, such as Browser Performance Testing, are just too small to represent a bounded context on their own.
  We should have a strategy for grouping product categories together when it makes sense.
- Product categories don't necessarily translate into clean boundaries.
  `Category:Pipeline Composition` and `Category:Continuous Integration` are some examples where Pipeline Authoring team
  and Pipeline Execution team share a lot of code.
- Some parts of the code might not have a clear product category associated to it.

Despite the above, product categories provide a rough view of the bounded contexts at play in the application.

One idea could be to use product categories to sketch the initial set of bounded contexts.
Then, group related or strongly coupled categories under the same bounded context and create new bounded contexts if missing.

### 2. Identify existing bounded contexts

Start with listing all the Ruby files in a spreadsheet and categorize them into components following the guidelines above.
Some of them are already pretty explicit like Ci::, Packages::, etc. Components should follow our
[existing naming guide](../../../development/software_design.md#use-namespaces-to-define-bounded-contexts).

This could be a short-lived Working Group with representative members of each DevOps stage (for example, Senior+ engineers).
The WG would help defining high-level components and will be the DRIs for driving the changes in their respective DevOps stage.

### 3. Publish the list of bounded contexts

The list of bounded contexts (top-level namespaces) extracted from the codebase should be defined statically so it can be
used programmatically.

```yaml
# file: config/bounded_contexts.yml
bounded_contexts:
  continuous_integration:
    dir: modules/ci
    namespace: 'Ci::'
  packages: ...
  merge_requests: ...
  git: ...
```

With this static list we could:

- Document the existing bounded contexts for engineers to see the big picture.
- Understand where to place new classes and modules.
- Enforce if any top-level namespaces are used that are not in the list of bounded contexts.
- Autoload non-standard Rails directories based on the given list.
1. [ADR-001: Modularize application domain](decisions/001_modular_application_domain.md)? Start with modularizing
1. [ADR-002: Define bounded context around feature categories](decisions/002_bounded_contexts_definition.md) as a SSoT in the code.
1. [ADR-003: Assign stewards to all modules and libraries](decisions/003_stewardship.md).
1. [Publish the list of bounded contexts](../../../development/software_design.md#use-namespaces-to-define-bounded-contexts).
   - Define a SSoT list of bounded contexts.
   - Enforce it using the RuboCop static analyzer.
   - Autoload non-standard Rails directories based on the given list.

@@ -0,0 +1,53 @@
---
creation-date: "2024-05-07"
authors: [ "@fabiopitino" ]
---

# Modular Monolith ADR 001: Modularize the application domain

## Context

Before modularizing the codebase, we first needed to define how we were going to divide it.

## Decision

We start by focusing on the application domain (backend business logic), leaving the
application adapters (Web controllers and views, REST/GraphQL endpoints) outside the
scope of the modularization initially.

The reasons for this are:

1. Code in application adapters may not always align with a specific
   domain. For example: a project settings endpoint or a merge request page contain
   references to many domains.
1. There was a need to run separate Rails nodes for the SaaS architecture using different
   profiles in order to save on memory.
   For example: on SaaS we wanted to be able to spin up more Sidekiq nodes without the need
   to load the whole Rails application. The assumption is that for running Sidekiq we don't
   need ActionCable, REST endpoints, GraphQL mutations or Rails views.
   We only need the application domain and infrastructure code.
   This could still be true even with the introduction of [Cells](../../cells/index.md) but
   we need to re-evaluate this assumption.
1. Keep the scope and effort smaller. Tackling only domain code is easier to understand than
   the complexity of how to break down the application adapters and all their edge cases.

The decision to scope out application adapters is not final and we decided to defer
it until later.

Finally, the infrastructure code containing technical concerns (typically in `lib/`) will
be part of a common "platform" module that every domain module will depend on in order to function.

The "platform" module can be broken down into independent libraries extracted as gems.

## Consequences

By focusing primarily on modularizing business logic, we simplify the rules and guidelines for
engineers. We can apply the same set of patterns across modules.

## Alternatives

We looked into including application adapters in the modularization effort but noticed that:

1. Modularizing adapters is more delicate as we need to preserve user-facing dependencies like
   routes.
1. The size of the adapters code is much smaller than the whole application domain.

@@ -0,0 +1,61 @@
---
creation-date: "2024-05-07"
authors: [ "@fabiopitino" ]
---

# Modular Monolith ADR 002: Define bounded contexts

## Context

With the focus primarily on the application domain, we needed to define how to
modularize it.

## Decision

The application domain is divided into bounded contexts which define the top-level
modules of the GitLab application. The term bounded context is widely used in
Domain-Driven Design.

Defining bounded contexts means organizing the code around product structure rather than
organizational structure.

From the research in [Proposal: split GitLab monolith into components](https://gitlab.com/gitlab-org/gitlab/-/issues/365293)
it seems that following [product categories](https://handbook.gitlab.com/handbook/product/categories/#hierarchy), as a guideline,
would be much better than translating organization structure into folder structure (for example, `app/modules/verify/pipeline-execution/...`).

However, this guideline alone is not sufficient and we need a more specific strategy:

- Bounded contexts (top level modules) should be [sufficiently deep](../../../../development/software_design.md#use-namespaces-to-define-bounded-contexts)
  to encapsulate implementation details and provide a smaller interface.
- Some product categories, such as Browser Performance Testing, are just too small to represent
  a bounded context on their own.
  We should have a strategy for grouping product categories together when it makes sense.
- Product categories don't necessarily translate into clean boundaries.
  `Category:Pipeline Composition` and `Category:Continuous Integration` are some examples
  where Pipeline Authoring team and Pipeline Execution team share a lot of code.
- Some parts of the code might not have a clear product category associated to it.

Despite the above, product categories provide a rough view of the bounded contexts at play in the application.
For that reason, we use product categories to sketch the initial set of bounded contexts.
Then, group related or strongly coupled categories under the same bounded context and create new bounded contexts if missing.

## Consequences

In May 2024 we concluded the [Bounded Contexts working group](https://handbook.gitlab.com/handbook/company/working-groups/bounded-contexts/),
which completed the first phase of modularization, described on this page.

We defined a list of [bounded contexts in code](../../../../development/software_design.md#use-namespaces-to-define-bounded-contexts)
and started enforcing them with RuboCop, in order to move towards a fully namespaced monolith.
Team members can edit this list by creating and deleting bounded contexts explicitly, and the decision is reviewed
by Staff+ engineers.

## Alternatives

We evaluated whether to align the code to the organizational structure but we decided it wasn't viable:

- Product categories can change ownership and we have seen some pretty frequent changes, even back and forth.
  Moving code every time a product category changes ownership adds too much maintenance overhead.
- Teams and organization changes should just mean relabelling the ownership of specific modules.
- Coupling and complexity are directly correlated to business logic and product structure.
  A code organization that aligns to organizational structure could generate unnecessary complexity and
  much more coupling.

@@ -0,0 +1,60 @@
---
creation-date: "2024-05-08"
authors: [ "@fabiopitino" ]
---

# Modular Monolith ADR 003: Module stewardship

## Context

How do we assign stewardship to domain and platform modules? We have a large amount of shared code
that does not have explicit stewards who can provide a vision and direction for that part of the code.

## Decision

We use the term **stewards** instead of **owners** to be more in line with the GitLab principle of
**everyone can contribute**. Stewards are caretakers of the code. They know how a specific
functionality is designed and why. They know the architectural characteristics and constraints.
However, they welcome changes and guide contributors towards success.

A module, whether it is a domain bounded context or a platform module, must have at least one group of stewards.
This group can be a team name (or GitLab group handle). Optionally, the list of stewards can include
single IC entries.

When we use a Packwerk package to extract a module, we will be able to indicate stewardship directly
in the `package.yml`:

```yaml
metadata:
  stewards:
    - group::pipeline execution # team name
    - group::pipeline authoring # team name
    - @grzesiek # IC
    - @ayufan # IC
```

For platform modules (e.g. `Gitlab::Redis`) we might not have a whole team dedicated as stewards since
all platform code is classified as "shared". However, team members can add themselves as experts of a
particular functionality.

## Consequences

Stewardship defined in code can be very powerful:

- Sections of CODEOWNERS could be automatically generated from packages' metadata.
- Review Roulette or Suggested Reviews features can use this list as first preference.
- Engineers can easily identify stewards and have design conversations early.
- Gems living in the monolith (`gems/`), which should be wrapped into a Packwerk package,
  can benefit from having explicit stewards.

## Alternatives

In the initial phase of modularization, before adopting Packwerk, we don't have an explicit concept
of ownership. We are initially relying on each team to know what bounded contexts they are responsible
for. For the "shared code" in the platform modules we initially expect maintainers to fill the role of
stewards.

- Pros: we give trainee maintainers a clear development path and goals. Today it feels unclear what they must
  learn in order to become successful maintainers.
- Cons: The amount of "shared" code is very large and it is still hard to understand who knows best about
  a particular functionality. Even extracting code into gems doesn't solve the lack of explicit ownership.

@@ -94,8 +94,8 @@ monolith successful. We will work on the aspects listed below, refine them, and
add more important details as we move forward towards the goal:

1. [Deliver modularization proof-of-concepts that will deliver key insights](proof_of_concepts.md).
1. Align modularization plans to the organizational structure by [defining bounded contexts](bounded_contexts.md).
1. [Separate domains into modules](packages_extraction.md) that will reflect organizational structure.
1. Align modularization plans to the product structure by [defining bounded contexts](bounded_contexts.md).
1. [Separate domains into modules](packages_extraction.md) that will reflect product structure.
1. Start a training program for team members on how to work with decoupled domains (TODO)
1. Build tools that will make it easier to build decoupled domains through inversion of control (TODO)
1. [Introduce hexagonal architecture within the monolith](hexagonal_monolith/index.md)

@@ -106,6 +106,17 @@ add more important details as we move forward towards the goal:

In progress.

- A working group [Bounded Contexts](https://handbook.gitlab.com/handbook/company/working-groups/bounded-contexts/)
  was concluded in April 2024, which defined a list of bounded contexts to be enforced for the GitLab Rails domain and
  infrastructure layer.

## Decisions

1. [ADR-001: Modularize application domain](decisions/001_modular_application_domain.md)? Start with modularizing
   the application domain and infrastructure code.
1. [ADR-002: Define bounded context around feature categories](decisions/002_bounded_contexts_definition.md) as a SSoT in the code.
1. [ADR-003: Assign stewards to all modules and libraries](decisions/003_stewardship.md).

## Glossary

- `modules` are Ruby modules and can be used to nest code hierarchically.

@ -44,32 +44,36 @@ using the [`coverage`](../yaml/index.md#coverage) keyword.
|
|||
|
||||
#### Test coverage examples
|
||||
|
||||
The following list shows sample regex patterns for many common test coverage tools.
|
||||
The following table lists sample regex patterns for many common test coverage tools.
|
||||
If the tooling has changed after these samples were created, or if the tooling was customized,
|
||||
the regex might not work. Test the regex carefully to make sure it correctly finds the
|
||||
coverage in the tool's output:
|
||||
|
||||
<!-- vale gitlab.Spelling = NO -->
|
||||
<!-- markdownlint-disable MD056 -->
|
||||
|
||||
- Simplecov (Ruby). Example: `/\(\d+.\d+\%\) covered/`.
|
||||
- pytest-cov (Python). Example: `/TOTAL.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/`.
|
||||
- Scoverage (Scala). Example: `/Statement coverage[A-Za-z\.*]\s*:\s*([^%]+)/`.
|
||||
- `pest --coverage --colors=never` (PHP). Example: `/^\s*Cov:\s*\d+\.\d+?%$/`.
|
||||
- `phpunit --coverage-text --colors=never` (PHP). Example: `/^\s*Lines:\s*\d+.\d+\%/`.
|
||||
- gcovr (C/C++). Example: `/^TOTAL.*\s+(\d+\%)$/`.
|
||||
- `tap --coverage-report=text-summary` (NodeJS). Example: `/^Statements\s*:\s*([^%]+)/`.
|
||||
- `nyc npm test` (NodeJS). Example: `/All files[^|]*\|[^|]*\s+([\d\.]+)/`.
|
||||
- `jest --ci --coverage` (NodeJS). Example: `/All files[^|]*\|[^|]*\s+([\d\.]+)/`.
|
||||
- excoveralls (Elixir). Example: `/\[TOTAL\]\s+(\d+\.\d+)%/`.
|
||||
- `mix test --cover` (Elixir). Example: `/\d+.\d+\%\s+\|\s+Total/`.
|
||||
- JaCoCo (Java/Kotlin). Example: `/Total.*?([0-9]{1,3})%/`.
|
||||
- `go test -cover` (Go). Example: `/^coverage: (\d+.\d+)% of statements$/`.
|
||||
- .NET (OpenCover). Example: `/(Visited Points).*\((.*)\)/`.
|
||||
- .NET (`dotnet test` line coverage). Example: `/Total\s*\|\s*(\d+(?:\.\d+)?)/`.
|
||||
- tarpaulin (Rust). Example: `/^\d+.\d+% coverage/`.
|
||||
- Pester (PowerShell). Example: `/Covered (\d+\.\d+%)/`.
|
||||
| Name | Language | Command | Example |
|
||||
|--------------|--------------|--------------|--------------|
|
||||
| Simplecov | Ruby | None | `/\(\d+.\d+\%\) covered/` |
|
||||
| pytest-cov | Python | None | `/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/` |
|
||||
| Scoverage | Scala | None | `/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/` |
|
||||
| pest | PHP | `pest --coverage --colors=never` | `/Statement coverage[A-Za-z\.*]\s*:\s*([^%]+)/` |
|
||||
| phpunit | PHP | `phpunit --coverage-text --colors=never` | `/^\s*Lines:\s*\d+.\d+\%/` |
|
||||
| gcovr | C/C++ | None | `/^TOTAL.*\s+(\d+\%)$/` |
|
||||
| tap          | NodeJS       | `tap --coverage-report=text-summary` | `/^Statements\s*:\s*([^%]+)/` |
|
||||
| nyc          | NodeJS       | `nyc npm test` | `/All files[^|]*\|[^|]*\s+([\d\.]+)/` |
|
||||
| jest         | NodeJS       | `jest --ci --coverage` | `/All files[^|]*\|[^|]*\s+([\d\.]+)/` |
|
||||
| excoveralls | Elixir | None | `/\[TOTAL\]\s+(\d+\.\d+)%/` |
|
||||
| mix | Elixir | `mix test --cover` | `/\d+.\d+\%\s+\|\s+Total/` |
|
||||
| JaCoCo | Java/Kotlin | None | `/Total.*?([0-9]{1,3})%/` |
|
||||
| go test | Go | `go test -cover` | `/coverage: \d+.\d+% of statements/` |
|
||||
| OpenCover | .NET | None | `/(Visited Points).*\((.*)\)/` |
|
||||
| dotnet test | .NET | `dotnet test` | `/Total\s*\|\s*(\d+(?:\.\d+)?)/` |
|
||||
| tarpaulin | Rust | None | `/^\d+.\d+% coverage/` |
|
||||
| Pester | PowerShell | None | `/Covered (\d+\.\d+%)/` |
|
||||
|
||||
<!-- vale gitlab.Spelling = YES -->
|
||||
<!-- markdownlint-enable MD056 -->
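One way to sanity-check a pattern before committing it is to run it against a captured
line of the tool's output. A minimal Ruby sketch using the Simplecov pattern from the
table; the sample output line is illustrative:

```ruby
# Verify that the Simplecov pattern captures the coverage figure.
pattern = /\(\d+.\d+\%\) covered/
sample  = '52 / 53 LOC (98.11%) covered.'

match = sample[pattern]
puts match ? "matched: #{match}" : 'pattern did not match'
```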
|
||||
|
||||
### View history of project code coverage
|
||||
|
||||
|
|
|
|||
|
|
@@ -29,19 +29,22 @@ GitLab has completed FIPS 140-2 Compliance for the build specified in this docum
|
|||
To be compliant, all components (GitLab itself, Gitaly, and so on) must be compliant,
|
||||
along with the communication between those components, and any storage used by
|
||||
them. Where functionality cannot be brought into compliance, it must be disabled
|
||||
when FIPS mode is enabled.
|
||||
when FIPS mode is enabled. FIPS-compliant cryptography means that a cryptographic
|
||||
operation leverages a FIPS-validated module.
|
||||
|
||||
### Leveraged Cryptographic modules
|
||||
|
||||
The following table is for reference purposes only and is not dynamically updated. The latest CMVP certificates for the modules listed below apply.
|
||||
|
||||
| Cryptographic module name | CMVP number | Instance type | Software component used |
|
||||
|----------------------------------------------------------|-------------------------------------------------------------------------------------------------|---------------|-------------------------|
|
||||
| Ubuntu 20.04 AWS Kernel Crypto API Cryptographic Module | [4132](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4132) | EC2 | Linux kernel |
|
||||
| Ubuntu 20.04 OpenSSL Cryptographic Module | [3966](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/3966) | EC2 | Gitaly, Rails (Puma/Sidekiq) |
|
||||
| Ubuntu 20.04 Libgcrypt Cryptographic Module | [3902](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/3902) | EC2 instances | `gpg`, `sshd` |
|
||||
| Amazon Linux 2 Kernel Crypto API Cryptographic Module | [3709](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/3709) | EKS nodes | Linux kernel |
|
||||
| Amazon Linux 2 OpenSSL Cryptographic Module | [3553](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/3553) | EKS nodes | NGINX |
|
||||
| RedHat Enterprise Linux 8 OpenSSL Cryptographic Module | [4271](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4271) | EKS nodes | UBI containers: Workhorse, Pages, container registry, Rails (Puma/Sidekiq), Security Analyzers, `gitlab-sshd` |
|
||||
| RedHat Enterprise Linux 8 Libgcrypt Cryptographic Module | [3784](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/3784) | EKS nodes | UBI containers: GitLab Shell, `gpg` |
|
||||
| Ubuntu 20.04 AWS Kernel Crypto API Cryptographic Module | [4366](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4366) | EC2 (Omnibus) | Linux kernel |
|
||||
| Ubuntu 20.04 OpenSSL Cryptographic Module | [4292](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4292) | EC2 (Omnibus) | Gitaly, Rails (Puma/Sidekiq) |
|
||||
| Ubuntu 20.04 Libgcrypt Cryptographic Module | [3902](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/3902) | EC2 (Omnibus) | `gpg`, `sshd` |
|
||||
| Amazon Linux 2 Kernel Crypto API Cryptographic Module | [4593](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4593) | EKS nodes | Linux kernel |
|
||||
| Amazon Linux 2 OpenSSL Cryptographic Module | [4548](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4548) | EKS nodes | EKS |
|
||||
| Red Hat Enterprise Linux 8 OpenSSL Cryptographic Module | [4642](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4642) | GitLab Helm chart | UBI containers: Workhorse, Pages, container registry, Rails (Puma/Sidekiq), Security Analyzers, `gitlab-sshd` |
|
||||
| Red Hat Enterprise Linux 8 Libgcrypt Cryptographic Module | [4438](https://csrc.nist.gov/projects/cryptographic-module-validation-program/certificate/4438) | GitLab Helm chart | UBI containers: GitLab Shell, `gpg` |
|
||||
|
||||
### Supported Operating Systems
|
||||
|
||||
|
|
@@ -65,7 +68,6 @@ listed here that also do not work properly in FIPS mode:
|
|||
- [Static Application Security Testing (SAST)](../user/application_security/sast/index.md)
|
||||
supports a reduced set of [analyzers](../user/application_security/sast/index.md#fips-enabled-images)
|
||||
when operating in FIPS-compliant mode.
|
||||
- Advanced search is not included in FIPS mode and must be disabled for GitLab to remain FIPS-compliant.
|
||||
- [Operational Container Scanning](../user/clusters/agent/vulnerabilities.md).
|
||||
|
||||
Additionally, these package repositories are disabled in FIPS mode:
|
||||
|
|
@@ -73,15 +75,23 @@ Additionally, these package repositories are disabled in FIPS mode:
|
|||
- [Conan package repository](../user/packages/conan_repository/index.md).
|
||||
- [Debian package repository](../user/packages/debian_repository/index.md).
|
||||
|
||||
## FIPS validation at GitLab
|
||||
## FIPS compliance vs FIPS validation at GitLab
|
||||
|
||||
Unlike FIPS compliance, FIPS validation is a formal declaration of compliance by
|
||||
an accredited auditor. The requirements needed to pass the audit are the same as
|
||||
for FIPS compliance.
|
||||
GitLab does not fork or modify cryptographic binaries (for example OpenSSL) in its FIPS-compliant
|
||||
software releases, but instead uses existing, FIPS-validated cryptographic software (modules).
|
||||
GitLab therefore does not need to submit its software for independent
|
||||
laboratory testing through the
|
||||
[NIST Cryptographic Module Validation Program (CMVP)](https://csrc.nist.gov/projects/cryptographic-module-validation-program/).
|
||||
Instead, GitLab must use FIPS-validated software (listed in
|
||||
[Cryptographic Module Validation Program](https://csrc.nist.gov/projects/cryptographic-module-validation-program/validated-modules))
|
||||
that has an active CMVP certificate and ensure it is enabled for all cryptographic operations.
|
||||
|
||||
A list of FIPS-validated modules can be found at the
|
||||
NIST (National Institute of Standards and Technology)
|
||||
[cryptographic module validation program](https://csrc.nist.gov/projects/cryptographic-module-validation-program/validated-modules).
|
||||
FIPS-compliant cryptography means that a cryptographic operation uses a FIPS-validated module.
|
||||
FIPS mode must be enabled on at least one side of every communication session: the client, the server, or both.
|
||||
Enabling FIPS mode on the client ensures that the client uses strong cryptographic algorithms that
|
||||
are compliant with FIPS 140-3/FIPS 140-2 for encryption, hashing, and signing.
|
||||
If FIPS mode is not enabled on the client, it must be enabled on the server, application
|
||||
load balancer, or proxy server to allow FIPS-compliant connections.
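On a given node, you can ask Ruby's OpenSSL bindings whether the underlying library is
operating in FIPS mode. This is only a minimal diagnostic sketch; the exact behavior
depends on how the local OpenSSL was built:

```ruby
require 'openssl'

# Reports whether the underlying OpenSSL library is currently in FIPS mode.
puts "FIPS mode enabled: #{OpenSSL.fips_mode}"

# Switching FIPS mode on raises OpenSSL::OpenSSLError when the library
# was not built with FIPS support.
begin
  OpenSSL.fips_mode = true
rescue OpenSSL::OpenSSLError => e
  puts "FIPS mode unavailable: #{e.message}"
end
```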
|
||||
|
||||
## Install GitLab with FIPS compliance
|
||||
|
||||
|
|
@@ -91,8 +101,9 @@ a hybrid deployment using elements from both Omnibus and our Cloud Native GitLab
|
|||
|
||||
### Prerequisites
|
||||
|
||||
- Amazon Web Services account. Our first target environment is running on AWS, and uses other FIPS Compliant AWS resources.
|
||||
- Amazon Web Services account. Our first target environment is running on AWS, and uses other FIPS Compliant AWS resources. For many AWS resources, you must use a [FIPS specific endpoint](https://aws.amazon.com/compliance/fips/).
|
||||
- Ability to run Ubuntu 20.04 machines for GitLab. Our first target environment uses the hybrid architecture.
|
||||
- Advanced Search: GitLab does not provide a packaged Elasticsearch or OpenSearch deployment. You must use a FIPS-compliant service or disable Advanced Search.
|
||||
|
||||
### Set up a FIPS-enabled cluster
|
||||
|
||||
|
|
|
|||
|
|
@@ -57,7 +57,7 @@ Examples:
|
|||
```ruby
|
||||
rule { developer } # Static role check
|
||||
rule { can?(:developer_access) } # Another approach used in some classes
|
||||
rule { role_enables_read_dependency } # Custom role check
|
||||
rule { custom_role_enables_read_dependency } # Custom role check
|
||||
```
|
||||
|
||||
#### Checks Related to the Current User
|
||||
|
|
|
|||
|
|
@@ -198,36 +198,14 @@ before in a separate merge request, before completing the below.
|
|||
- For example: if the ability we would like to add is `read_dependency`, then an update to `ee/app/policies/ee/group_policy.rb` would look as follows:
|
||||
|
||||
```ruby
|
||||
desc "Custom role on group that enables read dependency"
|
||||
condition(:role_enables_read_dependency) do
|
||||
::Auth::MemberRoleAbilityLoader.new(
|
||||
user: @user,
|
||||
resource: @subject,
|
||||
ability: :read_dependency
|
||||
).has_ability?
|
||||
end
|
||||
|
||||
rule { custom_roles_allowed & role_enables_read_dependency }.policy do
|
||||
enable :read_dependency
|
||||
end
|
||||
rule { custom_role_enables_read_dependency }.enable(:read_dependency)
|
||||
```
|
||||
|
||||
- Similarly, if the ability is checked at the project level, add rules to `ProjectPolicy` to enable the ability.
|
||||
- For example: if the ability we would like to add is `read_dependency`, then an update to `ee/app/policies/ee/project_policy.rb` would look as follows:
|
||||
|
||||
```ruby
|
||||
desc "Custom role on project that enables read dependency"
|
||||
condition(:role_enables_read_dependency) do
|
||||
::Auth::MemberRoleAbilityLoader.new(
|
||||
user: @user,
|
||||
resource: @subject,
|
||||
ability: :read_dependency
|
||||
).has_ability?
|
||||
end
|
||||
|
||||
rule { custom_roles_allowed & role_enables_read_dependency }.policy do
|
||||
enable :read_dependency
|
||||
end
|
||||
rule { custom_role_enables_read_dependency }.enable(:read_dependency)
|
||||
```
|
||||
|
||||
- Not all abilities need to be enabled on both levels. For instance, `admin_terraform_state` allows users to manage a project's Terraform state; it only needs to be enabled at the project level, not the group level, and thus only needs to be configured in `ee/app/policies/ee/project_policy.rb`. A quick way to verify the resulting permission is sketched below.
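Once the policy rule is in place, the new ability can be verified with the standard
policy check. A minimal sketch, assuming a user whose custom role grants
`read_dependency` on the project; the lookups are illustrative:

```ruby
# Hypothetical verification, for example in a Rails console.
user    = User.find_by(username: 'example-user')
project = Project.find_by_full_path('example-group/example-project')

# True when `custom_role_enables_read_dependency` holds for this pair.
Ability.allowed?(user, :read_dependency, project)
```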
|
||||
|
|
|
|||
|
|
@@ -235,8 +235,8 @@ Be aware of the following conditions between a vulnerability and a linked GitLab
|
|||
|
||||
Prerequisites:
|
||||
|
||||
- Ensure the Jira issue integration is [configured](../../../integration/jira/configure.md#configure-the-integration) and the
|
||||
**View Jira issues** and **Create Jira issues for vulnerabilities** checkboxes are selected.
|
||||
- Ensure the Jira issue integration is [configured](../../../integration/jira/configure.md#configure-the-integration)
|
||||
and the **Create Jira issues for vulnerabilities** checkbox is selected.
|
||||
|
||||
To link a vulnerability to an existing Jira issue, add the following line to the Jira issue's description:
|
||||
|
||||
|
|
|
|||
|
|
@@ -53,6 +53,8 @@ specific to a group, including:
|
|||
|
||||
### Create a webhook
|
||||
|
||||
> - **Name** and **Description** [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141977) in GitLab 16.9.
|
||||
|
||||
To create a webhook for a project or group:
|
||||
|
||||
1. In your project or group, on the left sidebar, select **Settings > Webhooks**.
|
||||
|
|
|
|||
|
|
@@ -77,7 +77,7 @@ module BitbucketServer
|
|||
end
|
||||
|
||||
def to_hash
|
||||
parent_comment_note = { note: parent_comment.note } if parent_comment
|
||||
parent_comment_note = parent_comment.note if parent_comment
|
||||
|
||||
{
|
||||
id: id,
|
||||
|
|
@@ -87,7 +87,7 @@ module BitbucketServer
|
|||
created_at: created_at,
|
||||
updated_at: updated_at,
|
||||
comments: comments.map(&:to_hash),
|
||||
parent_comment: parent_comment_note
|
||||
parent_comment_note: parent_comment_note
|
||||
}
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@@ -74,18 +74,18 @@ module ExtractsRef
|
|||
# that will be handled as well.
|
||||
# rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
def assign_ref_vars
|
||||
@id, @ref, @path = extract_ref_path
|
||||
@repo = repository_container.repository
|
||||
raise InvalidPathError if @ref.match?(/\s/)
|
||||
ref_extractor = ExtractsRef::RefExtractor.new(repository_container, params.permit(:id, :ref, :path, :ref_type))
|
||||
ref_extractor.extract!
|
||||
|
||||
@id = ref_extractor.id
|
||||
@ref = ref_extractor.ref
|
||||
@path = ref_extractor.path
|
||||
@repo = ref_extractor.repo
|
||||
|
||||
return unless @ref.present?
|
||||
|
||||
@commit = if ref_type
|
||||
@fully_qualified_ref = ExtractsRef::RefExtractor.qualify_ref(@ref, ref_type)
|
||||
@repo.commit(@fully_qualified_ref)
|
||||
else
|
||||
@repo.commit(@ref)
|
||||
end
|
||||
@commit = ref_extractor.commit
|
||||
@fully_qualified_ref = ref_extractor.fully_qualified_ref
|
||||
end
|
||||
# rubocop:enable Gitlab/ModuleWithInstanceVariables
|
||||
|
||||
|
|
@@ -93,13 +93,6 @@ module ExtractsRef
|
|||
@tree ||= @repo.tree(@commit.id, @path) # rubocop:disable Gitlab/ModuleWithInstanceVariables
|
||||
end
|
||||
|
||||
def extract_ref_path
|
||||
id = get_id
|
||||
ref, path = extract_ref(id)
|
||||
|
||||
[id, ref, path]
|
||||
end
|
||||
|
||||
def ref_type
|
||||
ExtractsRef.ref_type(params[:ref_type])
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -11,7 +11,7 @@ module ExtractsRef
|
|||
REF_TYPES = [BRANCH_REF_TYPE, TAG_REF_TYPE].freeze
|
||||
|
||||
attr_reader :repository_container, :params
|
||||
attr_accessor :id, :ref, :commit, :path, :fully_qualified_ref
|
||||
attr_accessor :id, :ref, :commit, :path, :fully_qualified_ref, :repo
|
||||
|
||||
class << self
|
||||
def ref_type(type)
|
||||
|
|
@@ -37,7 +37,7 @@ module ExtractsRef
|
|||
|
||||
def initialize(repository_container, params, override_id: nil)
|
||||
@repository_container = repository_container
|
||||
@params = params.extract!(:id, :ref, :path, :ref_type)
|
||||
@params = params.slice(:id, :ref, :path, :ref_type)
|
||||
@override_id = override_id
|
||||
end
|
||||
|
||||
|
|
@@ -129,7 +129,7 @@ module ExtractsRef
|
|||
return ['', ''] unless repository_container
|
||||
|
||||
# If the ref appears to be a SHA, we're done, just split the string
|
||||
return $~.captures if id =~ /^(\h{40})(.+)/
|
||||
return $~.captures if id =~ /^(\h{40}\h{24}?)(.*)/
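# The optional 24 extra hex characters above let the pattern match a
# 64-character SHA-256 object name as well as a 40-character SHA-1.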
|
||||
|
||||
# No slash means we must have a ref and no path
|
||||
return [id, ''] unless id.include?('/')
|
||||
|
|
|
|||
|
|
@@ -33,7 +33,7 @@ module Gitlab
|
|||
|
||||
job_delay = calculate_job_delay(job_waiter.jobs_remaining)
|
||||
|
||||
sidekiq_worker_class.perform_in(job_delay, project.id, { iid: merge_request.iid }, job_waiter.key)
|
||||
sidekiq_worker_class_batch.perform_in(job_delay, project.id, { iid: merge_request.iid }, job_waiter.key)
|
||||
|
||||
mark_as_processed(merge_request)
|
||||
end
|
||||
|
|
@@ -82,15 +82,19 @@ module Gitlab
|
|||
comment_id: comment.id,
|
||||
comment: comment.to_hash.deep_stringify_keys
|
||||
}
|
||||
sidekiq_worker_class.perform_in(job_delay, project.id, object_hash, job_waiter.key)
|
||||
sidekiq_worker_class_individual.perform_in(job_delay, project.id, object_hash, job_waiter.key)
|
||||
|
||||
mark_as_processed(comment)
|
||||
end
|
||||
|
||||
def sidekiq_worker_class
|
||||
def sidekiq_worker_class_batch
|
||||
ImportPullRequestNotesWorker
|
||||
end
|
||||
|
||||
def sidekiq_worker_class_individual
|
||||
ImportPullRequestNoteWorker
|
||||
end
|
||||
|
||||
def id_for_already_processed_cache(object)
|
||||
# :iid is used for the `import_notes_in_batch` which uses `merge_request` as the `object`
|
||||
# it can be cleaned up after `import_notes_in_batch` is removed
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,66 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BitbucketServerImport
|
||||
module Importers
|
||||
class PullRequestNoteImporter
|
||||
include Loggable
|
||||
|
||||
def initialize(project, hash)
|
||||
@project = project
|
||||
@object = hash.with_indifferent_access
|
||||
end
|
||||
|
||||
def execute
|
||||
return unless import_data_valid?
|
||||
|
||||
log_info(import_stage: 'import_pull_request_note', message: 'starting', iid: object[:iid])
|
||||
|
||||
import
|
||||
|
||||
log_info(import_stage: 'import_pull_request_note', message: 'finished', iid: object[:iid])
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :object, :project
|
||||
|
||||
def import
|
||||
merge_request = project.merge_requests.find_by(iid: object[:iid]) # rubocop: disable CodeReuse/ActiveRecord -- no need to move this to ActiveRecord model
|
||||
if merge_request.nil?
|
||||
log_info(import_stage: 'import_pull_request_note', message: 'skipped', iid: object[:iid])
|
||||
|
||||
return
|
||||
end
|
||||
|
||||
importer = notes_importer_class(object[:comment_type])
|
||||
if importer
|
||||
importer.new(project, merge_request).execute(object[:comment])
|
||||
else
|
||||
log_debug(
|
||||
message: 'UNSUPPORTED_EVENT_TYPE',
|
||||
comment_type: object[:comment_type], comment_id: object[:comment_id]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
def notes_importer_class(comment_type)
|
||||
case comment_type
|
||||
when 'merge_event'
|
||||
Gitlab::BitbucketServerImport::Importers::PullRequestNotes::MergeEvent
|
||||
when 'inline'
|
||||
Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inline
|
||||
when 'standalone_notes'
|
||||
Gitlab::BitbucketServerImport::Importers::PullRequestNotes::StandaloneNotes
|
||||
when 'approved_event'
|
||||
Gitlab::BitbucketServerImport::Importers::PullRequestNotes::ApprovedEvent
|
||||
end
|
||||
end
|
||||
|
||||
def import_data_valid?
|
||||
project.import_data&.credentials && project.import_data&.data
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -0,0 +1,48 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BitbucketServerImport
|
||||
module Importers
|
||||
module PullRequestNotes
|
||||
class ApprovedEvent < BaseImporter
|
||||
include ::Gitlab::Import::MergeRequestHelpers
|
||||
|
||||
def execute(approved_event)
|
||||
log_info(
|
||||
import_stage: 'import_approved_event',
|
||||
message: 'starting',
|
||||
iid: merge_request.iid,
|
||||
event_id: approved_event[:id]
|
||||
)
|
||||
|
||||
user_id = user_finder.find_user_id(by: :username, value: approved_event[:approver_username]) ||
|
||||
user_finder.find_user_id(by: :email, value: approved_event[:approver_email])
|
||||
|
||||
if user_id.nil?
|
||||
log_info(
|
||||
import_stage: 'import_approved_event',
|
||||
message: 'skipped due to missing user',
|
||||
iid: merge_request.iid,
|
||||
event_id: approved_event[:id]
|
||||
)
|
||||
|
||||
return
|
||||
end
|
||||
|
||||
submitted_at = approved_event[:created_at] || merge_request[:updated_at]
|
||||
|
||||
create_approval!(project.id, merge_request.id, user_id, submitted_at)
|
||||
create_reviewer!(merge_request.id, user_id, submitted_at)
|
||||
|
||||
log_info(
|
||||
import_stage: 'import_approved_event',
|
||||
message: 'finished',
|
||||
iid: merge_request.iid,
|
||||
event_id: approved_event[:id]
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -0,0 +1,32 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BitbucketServerImport
|
||||
module Importers
|
||||
module PullRequestNotes
|
||||
# Base class for importing pull request notes during project import from Bitbucket Server
|
||||
class BaseImporter
|
||||
include Loggable
|
||||
|
||||
# @param project [Project]
|
||||
# @param merge_request [MergeRequest]
|
||||
def initialize(project, merge_request)
|
||||
@project = project
|
||||
@user_finder = UserFinder.new(project)
|
||||
@formatter = Gitlab::ImportFormatter.new
|
||||
@mentions_converter = Gitlab::Import::MentionsConverter.new('bitbucket_server', project)
|
||||
@merge_request = merge_request
|
||||
end
|
||||
|
||||
def execute(_args)
|
||||
raise NotImplementedError
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :project, :user_finder, :merge_request, :mentions_converter
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -0,0 +1,97 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BitbucketServerImport
|
||||
module Importers
|
||||
module PullRequestNotes
|
||||
class BaseNoteDiffImporter < BaseImporter
|
||||
PARENT_COMMENT_CONTEXT_LENGTH = 80
|
||||
|
||||
def build_position(merge_request, pr_comment)
|
||||
params = {
|
||||
diff_refs: merge_request.diff_refs,
|
||||
old_path: pr_comment[:file_path],
|
||||
new_path: pr_comment[:file_path],
|
||||
old_line: pr_comment[:old_pos],
|
||||
new_line: pr_comment[:new_pos]
|
||||
}
|
||||
|
||||
Gitlab::Diff::Position.new(params)
|
||||
end
|
||||
|
||||
def create_diff_note(merge_request, comment, position, discussion_id = nil)
|
||||
attributes = pull_request_comment_attributes(comment)
|
||||
attributes.merge!(position: position, type: 'DiffNote')
|
||||
attributes[:discussion_id] = discussion_id if discussion_id
|
||||
|
||||
note = merge_request.notes.build(attributes)
|
||||
|
||||
if note.valid?
|
||||
note.save
|
||||
return note
|
||||
end
|
||||
|
||||
log_info(
|
||||
import_stage: 'create_diff_note',
|
||||
message: 'creating standalone fallback for DiffNote',
|
||||
iid: merge_request.iid,
|
||||
comment_id: comment[:id]
|
||||
)
|
||||
|
||||
# Bitbucket Server supports the ability to comment on any line, not just the
|
||||
# line in the diff. If we can't add the note as a DiffNote, fall back to creating
|
||||
# a regular note.
|
||||
create_basic_fallback_note(merge_request, comment, position)
|
||||
rescue StandardError => e
|
||||
Gitlab::ErrorTracking.log_exception(
|
||||
e,
|
||||
import_stage: 'create_diff_note', comment_id: comment[:id], error: e.message
|
||||
)
|
||||
|
||||
nil
|
||||
end
|
||||
|
||||
def pull_request_comment_attributes(comment)
|
||||
author = user_finder.uid(comment)
|
||||
note = ''
|
||||
|
||||
unless author
|
||||
author = project.creator_id
|
||||
note = "*By #{comment[:author_username]} (#{comment[:author_email]})*\n\n"
|
||||
end
|
||||
|
||||
comment_note = mentions_converter.convert(comment[:note])
|
||||
|
||||
note +=
|
||||
# Provide some context for replying
|
||||
if comment[:parent_comment_note]
|
||||
"> #{comment[:parent_comment_note].truncate(PARENT_COMMENT_CONTEXT_LENGTH)}\n\n#{comment_note}"
|
||||
else
|
||||
comment_note
|
||||
end
|
||||
|
||||
{
|
||||
project: project,
|
||||
note: note,
|
||||
author_id: author,
|
||||
created_at: comment[:created_at],
|
||||
updated_at: comment[:updated_at]
|
||||
}
|
||||
end
|
||||
|
||||
def create_basic_fallback_note(merge_request, comment, position)
|
||||
attributes = pull_request_comment_attributes(comment)
|
||||
note = "*Comment on"
|
||||
|
||||
note += " #{position.old_path}:#{position.old_line} -->" if position.old_line
|
||||
note += " #{position.new_path}:#{position.new_line}" if position.new_line
|
||||
note += "*\n\n#{comment[:note]}"
|
||||
|
||||
attributes[:note] = note
|
||||
merge_request.notes.create!(attributes)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -0,0 +1,38 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BitbucketServerImport
|
||||
module Importers
|
||||
module PullRequestNotes
|
||||
class Inline < BaseNoteDiffImporter
|
||||
def execute(comment)
|
||||
log_info(
|
||||
import_stage: 'import_inline_comments',
|
||||
message: 'starting',
|
||||
iid: merge_request.iid,
|
||||
comment_id: comment[:id]
|
||||
)
|
||||
|
||||
position = build_position(merge_request, comment)
|
||||
parent = create_diff_note(merge_request, comment, position)
|
||||
|
||||
return unless parent&.persisted?
|
||||
|
||||
discussion_id = parent.discussion_id
|
||||
|
||||
comment[:comments].each do |reply|
|
||||
create_diff_note(merge_request, reply, position, discussion_id)
|
||||
end
|
||||
|
||||
log_info(
|
||||
import_stage: 'import_inline_comments',
|
||||
message: 'finished',
|
||||
iid: merge_request.iid,
|
||||
comment_id: comment[:id]
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -0,0 +1,35 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BitbucketServerImport
|
||||
module Importers
|
||||
module PullRequestNotes
|
||||
class MergeEvent < BaseImporter
|
||||
def execute(merge_event)
|
||||
log_info(
|
||||
import_stage: 'import_merge_event',
|
||||
message: 'starting',
|
||||
iid: merge_request.iid,
|
||||
event_id: merge_event[:id]
|
||||
)
|
||||
|
||||
committer = merge_event[:committer_email]
|
||||
|
||||
user_id = user_finder.find_user_id(by: :email, value: committer) || project.creator_id
|
||||
timestamp = merge_event[:merge_timestamp]
|
||||
merge_request.update({ merge_commit_sha: merge_event[:merge_commit] })
|
||||
metric = MergeRequest::Metrics.find_or_initialize_by(merge_request: merge_request) # rubocop: disable CodeReuse/ActiveRecord -- no need to move this to ActiveRecord model
|
||||
metric.update(merged_by_id: user_id, merged_at: timestamp)
|
||||
|
||||
log_info(
|
||||
import_stage: 'import_merge_event',
|
||||
message: 'finished',
|
||||
iid: merge_request.iid,
|
||||
event_id: merge_event[:id]
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -0,0 +1,41 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BitbucketServerImport
|
||||
module Importers
|
||||
module PullRequestNotes
|
||||
class StandaloneNotes < BaseNoteDiffImporter
|
||||
def execute(comment)
|
||||
log_info(
|
||||
import_stage: 'import_standalone_notes_comments',
|
||||
message: 'starting',
|
||||
iid: merge_request.iid,
|
||||
comment_id: comment[:id]
|
||||
)
|
||||
|
||||
merge_request.notes.create!(pull_request_comment_attributes(comment))
|
||||
|
||||
comment[:comments].each do |reply|
|
||||
merge_request.notes.create!(pull_request_comment_attributes(reply))
|
||||
end
|
||||
rescue StandardError => e
|
||||
Gitlab::ErrorTracking.log_exception(
|
||||
e,
|
||||
import_stage: 'import_standalone_notes_comments',
|
||||
merge_request_id: merge_request.id,
|
||||
comment_id: comment[:id],
|
||||
error: e.message
|
||||
)
|
||||
ensure
|
||||
log_info(
|
||||
import_stage: 'import_standalone_notes_comments',
|
||||
message: 'finished',
|
||||
iid: merge_request.iid,
|
||||
comment_id: comment[:id]
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -22,16 +22,7 @@ module Gitlab
|
|||
|
||||
merge_request = project.merge_requests.find_by(iid: object[:iid]) # rubocop: disable CodeReuse/ActiveRecord
|
||||
|
||||
if merge_request
|
||||
bitbucket_server_notes_separate_worker_enabled =
|
||||
project.import_data&.data&.dig('bitbucket_server_notes_separate_worker')
|
||||
|
||||
if bitbucket_server_notes_separate_worker_enabled
|
||||
import_notes_individually(merge_request, object)
|
||||
else
|
||||
import_notes_in_batch(merge_request)
|
||||
end
|
||||
end
|
||||
import_notes_in_batch(merge_request) if merge_request
|
||||
|
||||
log_info(import_stage: 'import_pull_request_notes', message: 'finished', iid: object[:iid])
|
||||
end
|
||||
|
|
@@ -40,27 +31,6 @@ module Gitlab
|
|||
|
||||
attr_reader :object, :project, :formatter, :user_finder, :mentions_converter
|
||||
|
||||
def import_notes_individually(merge_request, object)
|
||||
# We should not use "OpenStruct"
|
||||
# currently it is used under development feature flag
|
||||
object_representation = Gitlab::Json.parse(
|
||||
object[:comment].to_json,
|
||||
symbolize_names: true,
|
||||
object_class: 'OpenStruct'.constantize
|
||||
)
|
||||
|
||||
case object[:comment_type]
|
||||
when 'merge_event'
|
||||
import_merge_event(merge_request, object_representation)
|
||||
when 'inline'
|
||||
import_inline_comments([object_representation], merge_request)
|
||||
when 'standalone_notes'
|
||||
import_standalone_pr_comments([object_representation], merge_request)
|
||||
when 'approved_event'
|
||||
import_approved_event(merge_request, object_representation)
|
||||
end
|
||||
end
|
||||
|
||||
def import_notes_in_batch(merge_request)
|
||||
activities = client.activities(project_key, repository_slug, merge_request.iid)
|
||||
|
||||
|
|
|
|||
|
|
@@ -33055,6 +33055,9 @@ msgstr ""
|
|||
msgid "Model registry"
|
||||
msgstr ""
|
||||
|
||||
msgid "Model version not found"
|
||||
msgstr ""
|
||||
|
||||
msgid "Model was successfully deleted"
|
||||
msgstr ""
|
||||
|
||||
|
|
@@ -35079,6 +35082,9 @@ msgstr ""
|
|||
msgid "ObservabilityLogs|Failed to load logs."
|
||||
msgstr ""
|
||||
|
||||
msgid "ObservabilityLogs|Failed to load metadata."
|
||||
msgstr ""
|
||||
|
||||
msgid "ObservabilityLogs|Fingerprint"
|
||||
msgstr ""
|
||||
|
||||
|
|
@@ -36354,6 +36360,9 @@ msgstr ""
|
|||
msgid "Package type must be Terraform Module"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|%{count} packages were not published to the registry. Remove these packages and try again."
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|%{name} version %{version} was first created %{datetime}"
|
||||
msgstr ""
|
||||
|
||||
|
|
@@ -36396,9 +36405,6 @@ msgstr ""
|
|||
msgid "PackageRegistry|App name: %{name}"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Are you sure you want to delete the package protection rule?"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Author email: %{authorEmail}"
|
||||
msgstr ""
|
||||
|
||||
|
|
@@ -36516,15 +36522,18 @@ msgstr ""
|
|||
msgid "PackageRegistry|Delete package asset"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Delete package protection rule"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Delete package protection rule?"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Delete package version"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Delete packages"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Delete rule"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Delete selected"
|
||||
msgstr ""
|
||||
|
||||
|
|
@@ -36797,6 +36806,9 @@ msgstr ""
|
|||
msgid "PackageRegistry|Show Yarn commands"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Show packages with errors"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Skip metadata URL validation for the NuGet package"
|
||||
msgstr ""
|
||||
|
||||
|
|
@@ -36860,6 +36872,9 @@ msgstr ""
|
|||
msgid "PackageRegistry|There was a timeout and the package was not published. Delete this package and try again."
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|There was an error publishing %{count} packages"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|There was an error publishing a %{packageName} package"
|
||||
msgstr ""
|
||||
|
||||
|
|
@@ -36878,7 +36893,7 @@ msgstr ""
|
|||
msgid "PackageRegistry|Unable to load package"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Users with at least the Developer role for this project will be able to publish, edit, and delete packages."
|
||||
msgid "PackageRegistry|Users with at least the Developer role for this project will be able to publish, edit, and delete packages with this package name."
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|Validate these URLs manually to ensure malicious packages are not uploaded to the NuGet package registry. Selecting and clearing the checkbox might lead to invalid records in the package registry that you cannot update."
|
||||
|
|
@@ -36908,6 +36923,9 @@ msgstr ""
|
|||
msgid "PackageRegistry|You are about to delete %{name}, are you sure?"
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|You are about to delete the package protection rule for %{packageNamePattern}."
|
||||
msgstr ""
|
||||
|
||||
msgid "PackageRegistry|You are about to delete version %{version} of %{name}. Are you sure?"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@@ -44,12 +44,52 @@ RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_sessions do
|
|||
end
|
||||
|
||||
context 'when bearer header is provided' do
|
||||
let(:user_pat) { create(:personal_access_token) }
|
||||
context 'when it is a personal_access_token' do
|
||||
let(:user_pat) { create(:personal_access_token) }
|
||||
|
||||
it 'finds user by PAT' do
|
||||
connect(ActionCable.server.config.mount_path, headers: { Authorization: "Bearer #{user_pat.token}" })
|
||||
it 'finds user by PAT' do
|
||||
connect(ActionCable.server.config.mount_path, headers: { Authorization: "Bearer #{user_pat.token}" })
|
||||
|
||||
expect(connection.current_user).to eq(user_pat.user)
|
||||
expect(connection.current_user).to eq(user_pat.user)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when it is an OAuth access token' do
|
||||
context 'when it is a valid OAuth access token' do
|
||||
let(:user) { create(:user) }
|
||||
|
||||
let(:application) do
|
||||
Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: user)
|
||||
end
|
||||
|
||||
let(:oauth_token) do
|
||||
create(:oauth_access_token,
|
||||
application_id: application.id,
|
||||
resource_owner_id: user.id,
|
||||
scopes: "api"
|
||||
)
|
||||
end
|
||||
|
||||
it 'finds user by OAuth access token' do
|
||||
connect(ActionCable.server.config.mount_path, headers: {
|
||||
'Authorization' => "Bearer #{oauth_token.plaintext_token}"
|
||||
})
|
||||
|
||||
expect(connection.current_user).to eq(oauth_token.user)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when it is an invalid OAuth access token' do
|
||||
it 'sets the current_user as `nil`, and rejects the connection' do
|
||||
expect do
|
||||
connect(ActionCable.server.config.mount_path, headers: {
|
||||
'Authorization' => "Bearer invalid_token"
|
||||
})
|
||||
end.to have_rejected_connection
|
||||
|
||||
expect(connection.current_user).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@@ -11,6 +11,7 @@ export function createMockClient() {
|
|||
metricsSearchUrl: 'metrics-search-url',
|
||||
metricsSearchMetadataUrl: 'metrics-search-metadata-url',
|
||||
logsSearchUrl: 'logs-search-url',
|
||||
logsSearchMetadataUrl: 'logs-search-metadata-url',
|
||||
});
|
||||
|
||||
Object.getOwnPropertyNames(mockClient)
|
||||
|
|
|
|||
|
|
@@ -16,10 +16,11 @@ describe('Blob Header Default Actions', () => {
|
|||
|
||||
const blobHash = 'foo-bar';
|
||||
|
||||
function createComponent(propsData = {}) {
|
||||
function createComponent(propsData = {}, provided = {}) {
|
||||
wrapper = shallowMountExtended(BlobHeaderActions, {
|
||||
provide: {
|
||||
blobHash,
|
||||
...provided,
|
||||
},
|
||||
propsData: {
|
||||
rawPath: Blob.rawPath,
|
||||
|
|
@@ -37,6 +38,7 @@ describe('Blob Header Default Actions', () => {
|
|||
describe('renders', () => {
|
||||
const findCopyButton = () => wrapper.findByTestId('copy-contents-button');
|
||||
const findViewRawButton = () => wrapper.findByTestId('viewRawButton');
|
||||
const findDownloadButton = () => wrapper.findByTestId('download-button');
|
||||
|
||||
it('gl-button-group component', () => {
|
||||
expect(btnGroup.exists()).toBe(true);
|
||||
|
|
@@ -85,6 +87,12 @@ describe('Blob Header Default Actions', () => {
|
|||
expect(findViewRawButton().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('does not render the download button if canDownloadCode is set to false', () => {
|
||||
createComponent({}, { canDownloadCode: false });
|
||||
|
||||
expect(findDownloadButton().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('emits a copy event if overrideCopy is set to true', () => {
|
||||
createComponent({ overrideCopy: true });
|
||||
findCopyButton().vm.$emit('click');
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,7 @@
|
|||
import {
|
||||
GlEmptyState,
|
||||
GlLoadingIcon,
|
||||
GlForm,
|
||||
GlFormInput,
|
||||
GlPagination,
|
||||
GlDropdown,
|
||||
|
|
@@ -206,6 +207,7 @@ describe('ErrorTrackingList', () => {
|
|||
|
||||
describe('filtering', () => {
|
||||
const findSearchBox = () => wrapper.findComponent(GlFormInput);
|
||||
const findGlForm = () => wrapper.findComponent(GlForm);
|
||||
|
||||
it('shows search box & sort dropdown', () => {
|
||||
expect(findSearchBox().exists()).toBe(true);
|
||||
|
|
@@ -214,7 +216,7 @@ describe('ErrorTrackingList', () => {
|
|||
|
||||
it('searches by query', () => {
|
||||
findSearchBox().vm.$emit('input', 'search');
|
||||
findSearchBox().trigger('keyup.enter');
|
||||
findGlForm().vm.$emit('submit', { preventDefault: () => {} });
|
||||
expect(actions.searchByQuery.mock.calls[0][1]).toBe('search');
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@@ -22,6 +22,7 @@ describe('buildClient', () => {
|
|||
const metricsSearchUrl = 'https://example.com/metrics/search';
|
||||
const metricsSearchMetadataUrl = 'https://example.com/metrics/searchmetadata';
|
||||
const logsSearchUrl = 'https://example.com/metrics/logs/search';
|
||||
const logsSearchMetadataUrl = 'https://example.com/metrics/logs/search';
|
||||
const FETCHING_TRACES_ERROR = 'traces are missing/invalid in the response';
|
||||
|
||||
const apiConfig = {
|
||||
|
|
@@ -34,6 +35,7 @@ describe('buildClient', () => {
|
|||
metricsSearchUrl,
|
||||
metricsSearchMetadataUrl,
|
||||
logsSearchUrl,
|
||||
logsSearchMetadataUrl,
|
||||
};
|
||||
|
||||
const getQueryParam = () => decodeURIComponent(axios.get.mock.calls[0][1].params.toString());
|
||||
|
|
|
|||
|
|
@@ -1,11 +1,13 @@
|
|||
import { GlAlert, GlSprintf } from '@gitlab/ui';
|
||||
import { GlAlert, GlButton } from '@gitlab/ui';
|
||||
import { nextTick } from 'vue';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import { TEST_HOST } from 'spec/test_constants';
|
||||
import { stubComponent } from 'helpers/stub_component';
|
||||
import PackagesListRow from '~/packages_and_registries/package_registry/components/list/package_list_row.vue';
|
||||
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
|
||||
import DeleteModal from '~/packages_and_registries/package_registry/components/delete_modal.vue';
|
||||
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
|
||||
import setWindowLocation from 'helpers/set_window_location_helper';
|
||||
import {
|
||||
DELETE_PACKAGE_TRACKING_ACTION,
|
||||
DELETE_PACKAGES_TRACKING_ACTION,
|
||||
|
|
@@ -51,11 +53,12 @@ describe('packages_list', () => {
|
|||
const findRegistryList = () => wrapper.findComponent(RegistryList);
|
||||
const findPackagesListRow = () => wrapper.findComponent(PackagesListRow);
|
||||
const findErrorPackageAlert = () => wrapper.findComponent(GlAlert);
|
||||
const findErrorAlertButton = () => findErrorPackageAlert().findComponent(GlButton);
|
||||
const findDeletePackagesModal = () => wrapper.findComponent(DeleteModal);
|
||||
|
||||
const showMock = jest.fn();
|
||||
|
||||
const mountComponent = ({ props = {}, provide = defaultProvide } = {}) => {
|
||||
const mountComponent = ({ props = {}, provide = defaultProvide, stubs = {} } = {}) => {
|
||||
wrapper = shallowMountExtended(PackagesList, {
|
||||
provide,
|
||||
propsData: {
|
||||
|
|
@@ -68,8 +71,7 @@ describe('packages_list', () => {
|
|||
show: showMock,
|
||||
},
|
||||
}),
|
||||
GlSprintf,
|
||||
RegistryList,
|
||||
...stubs,
|
||||
},
|
||||
slots: {
|
||||
'empty-state': EmptySlotStub,
|
||||
|
|
@@ -97,7 +99,7 @@ describe('packages_list', () => {
|
|||
|
||||
describe('when is not loading', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent();
|
||||
mountComponent({ stubs: { RegistryList } });
|
||||
});
|
||||
|
||||
it('does not show skeleton loader', () => {
|
||||
|
|
@@ -161,7 +163,7 @@ describe('packages_list', () => {
|
|||
|
||||
beforeEach(() => {
|
||||
eventSpy = jest.spyOn(Tracking, 'event');
|
||||
mountComponent();
|
||||
mountComponent({ stubs: { RegistryList } });
|
||||
finderFunction().vm.$emit('delete', deletePayload);
|
||||
});
|
||||
|
||||
|
|
@@ -277,8 +279,6 @@ describe('packages_list', () => {
|
|||
describe('when an error package is present', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent({ props: { list: [firstPackage, errorPackage] } });
|
||||
|
||||
return nextTick();
|
||||
});
|
||||
|
||||
it('should display an alert with default body message', () => {
|
||||
|
|
@@ -291,13 +291,11 @@ describe('packages_list', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('should display alert body with message set in `statusMessage`', async () => {
|
||||
it('should display alert body with message set in `statusMessage`', () => {
|
||||
mountComponent({
|
||||
props: { list: [firstPackage, { ...errorPackage, statusMessage: 'custom error message' }] },
|
||||
});
|
||||
|
||||
await nextTick();
|
||||
|
||||
expect(findErrorPackageAlert().exists()).toBe(true);
|
||||
expect(findErrorPackageAlert().props('title')).toBe(
|
||||
'There was an error publishing a error package package',
|
||||
|
|
@@ -305,14 +303,72 @@ describe('packages_list', () => {
|
|||
expect(findErrorPackageAlert().text()).toBe('custom error message');
|
||||
});
|
||||
|
||||
it('should display the deletion modal when clicked on the confirm button', async () => {
|
||||
findErrorPackageAlert().vm.$emit('primaryAction');
|
||||
describe('`Delete this package` button', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent({ props: { list: [firstPackage, errorPackage] }, stubs: { GlAlert } });
|
||||
});
|
||||
|
||||
await nextTick();
|
||||
it('displays the button within the alert', () => {
|
||||
expect(findErrorAlertButton().text()).toBe('Delete this package');
|
||||
});
|
||||
|
||||
expect(showMock).toHaveBeenCalledTimes(1);
|
||||
it('should display the deletion modal when clicked on the `Delete this package` button', async () => {
|
||||
findErrorAlertButton().vm.$emit('click');
|
||||
|
||||
expect(findDeletePackagesModal().props('itemsToBeDeleted')).toStrictEqual([errorPackage]);
|
||||
await nextTick();
|
||||
|
||||
expect(showMock).toHaveBeenCalledTimes(1);
|
||||
|
||||
expect(findDeletePackagesModal().props('itemsToBeDeleted')).toStrictEqual([errorPackage]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when `hideErrorAlert` is true', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent({
|
||||
props: { list: [firstPackage, errorPackage], hideErrorAlert: true },
|
||||
});
|
||||
});
|
||||
|
||||
it('does not display alert message', () => {
|
||||
expect(findErrorPackageAlert().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when multiple error packages are present', () => {
|
||||
beforeEach(() => {
|
||||
mountComponent({
|
||||
props: { list: [{ ...firstPackage, status: errorPackage.status }, errorPackage] },
|
||||
});
|
||||
});
|
||||
|
||||
it('should display an alert with default body message', () => {
|
||||
expect(findErrorPackageAlert().props('title')).toBe(
|
||||
'There was an error publishing 2 packages',
|
||||
);
|
||||
expect(findErrorPackageAlert().text()).toBe(
|
||||
'2 packages were not published to the registry. Remove these packages and try again.',
|
||||
);
|
||||
});
|
||||
|
||||
describe('`Show packages with errors` button', () => {
|
||||
beforeEach(() => {
|
||||
setWindowLocation(`${TEST_HOST}/foo?type=maven&after=1234`);
|
||||
mountComponent({
|
||||
props: {
|
||||
list: [{ ...firstPackage, status: errorPackage.status }, errorPackage],
|
||||
},
|
||||
stubs: { GlAlert },
|
||||
});
|
||||
});
|
||||
|
||||
it('is shown with correct href within the alert', () => {
|
||||
expect(findErrorAlertButton().text()).toBe('Show packages with errors');
|
||||
expect(findErrorAlertButton().attributes('href')).toBe(
|
||||
`${TEST_HOST}/foo?type=maven&status=error`,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@@ -147,7 +147,7 @@ describe('Package Search', () => {
|
|||
filters: [
|
||||
{ type: 'type', value: { data: 'Generic', operator: '=' }, id: 'token-3' },
|
||||
{ type: 'version', value: { data: '1.0.1', operator: '=' }, id: 'token-6' },
|
||||
{ type: 'status', value: { data: 'HIDDEN', operator: '=' }, id: 'token-7' },
|
||||
{ type: 'status', value: { data: 'hidden', operator: '=' }, id: 'token-7' },
|
||||
{ id: 'token-4', type: 'filtered-search-term', value: { data: 'gl' } },
|
||||
{ id: 'token-5', type: 'filtered-search-term', value: { data: '' } },
|
||||
],
|
||||
|
|
|
|||
|
|
@@ -93,7 +93,7 @@ describe('PackagesListApp', () => {
|
|||
|
||||
const waitForFirstRequest = () => {
|
||||
// emit a search update so the query is executed
|
||||
findSearch().vm.$emit('update', { sort: 'NAME_DESC', filters: [] });
|
||||
findSearch().vm.$emit('update', { sort: 'NAME_DESC', filters: {} });
|
||||
return waitForPromises();
|
||||
};
|
||||
|
||||
|
|
@@ -205,10 +205,13 @@ describe('PackagesListApp', () => {
|
|||
});
|
||||
|
||||
it('exists and has the right props', async () => {
|
||||
await waitForFirstRequest();
|
||||
findSearch().vm.$emit('update', searchPayload);
|
||||
await waitForPromises();
|
||||
|
||||
expect(findListComponent().props()).toMatchObject({
|
||||
list: expect.arrayContaining([expect.objectContaining({ id: packageData().id })]),
|
||||
isLoading: false,
|
||||
hideErrorAlert: false,
|
||||
groupSettings: expect.objectContaining({
|
||||
mavenPackageRequestsForwarding: true,
|
||||
npmPackageRequestsForwarding: true,
|
||||
|
|
@@ -217,6 +220,17 @@ describe('PackagesListApp', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('when packageStatus filter is set to error', () => {
|
||||
beforeEach(async () => {
|
||||
findSearch().vm.$emit('update', { filters: { packageStatus: 'error' } });
|
||||
await nextTick();
|
||||
});
|
||||
|
||||
it('sets hideErrorAlert prop', () => {
|
||||
expect(findListComponent().props('hideErrorAlert')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
it('when pagination emits next event fetches the next set of records', async () => {
|
||||
await waitForFirstRequest();
|
||||
findPagination().vm.$emit('next');
|
||||
|
|
|
|||
|
|
@@ -35,8 +35,7 @@ describe('Packages protection rules project settings', () => {
|
|||
extendedWrapper(wrapper.findByRole('table', { name: /protected packages/i }));
|
||||
const findTableBody = () => extendedWrapper(findTable().findAllByRole('rowgroup').at(1));
|
||||
const findTableRow = (i) => extendedWrapper(findTableBody().findAllByRole('row').at(i));
|
||||
const findTableRowButtonDelete = (i) =>
|
||||
findTableRow(i).findByRole('button', { name: /delete rule/i });
|
||||
const findTableRowButtonDelete = (i) => findTableRow(i).findByRole('button', { name: /delete/i });
|
||||
const findTableLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
|
||||
const findProtectionRuleForm = () => wrapper.findComponent(PackagesProtectionRuleForm);
|
||||
const findAddProtectionRuleButton = () =>
|
||||
|
|
@@ -426,19 +425,19 @@ describe('Packages protection rules project settings', () => {
|
|||
});
|
||||
|
||||
describe('when button is clicked', () => {
|
||||
it('binds modal "confirmation for delete action"', async () => {
|
||||
it('renders the "delete container protection rule" confirmation modal', async () => {
|
||||
createComponent();
|
||||
|
||||
await waitForPromises();
|
||||
|
||||
await findTableRowButtonDelete(0).trigger('click');
|
||||
|
||||
const modalId = getBinding(findTableRowButtonDelete(0).element, 'gl-modal');
|
||||
|
||||
expect(findModal().props('modal-id')).toBe(modalId);
|
||||
expect(findModal().props('title')).toBe(
|
||||
'Are you sure you want to delete the package protection rule?',
|
||||
);
|
||||
expect(findModal().text()).toBe(
|
||||
'Users with at least the Developer role for this project will be able to publish, edit, and delete packages.',
|
||||
expect(findModal().props('title')).toBe('Delete package protection rule?');
|
||||
expect(findModal().text()).toContain(
|
||||
'Users with at least the Developer role for this project will be able to publish, edit, and delete packages with this package name.',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@@ -408,9 +408,26 @@ RSpec.describe BlobHelper do
|
|||
resource_id: project.to_global_id,
|
||||
user_id: user.to_global_id,
|
||||
target_branch: ref,
|
||||
original_branch: ref
|
||||
original_branch: ref,
|
||||
can_download_code: 'false'
|
||||
})
|
||||
end
|
||||
|
||||
context 'when a user can download code' do
|
||||
let_it_be(:user) { build_stubbed(:user) }
|
||||
|
||||
before do
|
||||
allow(helper).to receive(:current_user).and_return(user)
|
||||
allow(Ability).to receive(:allowed?).and_call_original
|
||||
allow(Ability).to receive(:allowed?).with(user, :download_code, project).and_return(true)
|
||||
end
|
||||
|
||||
it 'returns true for `can_download_code` value' do
|
||||
expect(helper.vue_blob_app_data(project, blob, ref)).to include(
|
||||
can_download_code: 'true'
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "#copy_blob_source_button" do
|
||||
|
|
|
|||
|
|
@@ -90,22 +90,22 @@ RSpec.describe BitbucketServer::Representation::Comment, feature_category: :impo
|
|||
hash_including(
|
||||
note: 'Hello world',
|
||||
comments: [],
|
||||
parent_comment: { note: 'is this a new line?' }
|
||||
parent_comment_note: 'is this a new line?'
|
||||
),
|
||||
hash_including(
|
||||
note: 'Ok',
|
||||
comments: [],
|
||||
parent_comment: { note: 'Hello world' }
|
||||
parent_comment_note: 'Hello world'
|
||||
),
|
||||
hash_including(
|
||||
note: 'hi',
|
||||
comments: [],
|
||||
parent_comment: { note: 'Hello world' }
|
||||
parent_comment_note: 'Hello world'
|
||||
),
|
||||
hash_including(
|
||||
note: 'hello',
|
||||
comments: [],
|
||||
parent_comment: { note: 'is this a new line?' }
|
||||
parent_comment_note: 'is this a new line?'
|
||||
)
|
||||
)
|
||||
)
|
||||
|
|
|
|||
|
|
@@ -20,6 +20,16 @@ RSpec.describe ExtractsRef::RefExtractor, feature_category: :source_code_managem
|
|||
allow(container.repository).to receive(:ref_names).and_return(ref_names)
|
||||
end
|
||||
|
||||
describe '#initialize' do
|
||||
let(:params) { { id: 1, ref: 2, path: 3, ref_type: 4 } }
|
||||
|
||||
it 'does not mutate provided params' do
|
||||
ref_extractor
|
||||
|
||||
expect(params).to eq(id: 1, ref: 2, path: 3, ref_type: 4)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#extract_vars!' do
|
||||
it_behaves_like 'extracts ref vars'
|
||||
|
||||
|
|
|
|||
|
|
@@ -146,7 +146,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::NotesImporter, feature_
|
|||
end
|
||||
|
||||
it 'does not schedule job for processed merge requests', :aggregate_failures do
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNotesWorker).not_to receive(:perform_in)
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNoteWorker).not_to receive(:perform_in)
|
||||
|
||||
waiter = importer.execute
|
||||
|
||||
|
|
@@ -164,7 +164,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::NotesImporter, feature_
|
|||
end
|
||||
|
||||
it 'imports the stand alone comments' do
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNotesWorker).to receive(:perform_in).with(
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNoteWorker).to receive(:perform_in).with(
|
||||
anything,
|
||||
project.id,
|
||||
hash_including(
|
||||
|
|
@@ -196,7 +196,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::NotesImporter, feature_
|
|||
end
|
||||
|
||||
it 'imports the inline comment' do
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNotesWorker).to receive(:perform_in).with(
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNoteWorker).to receive(:perform_in).with(
|
||||
anything,
|
||||
project.id,
|
||||
hash_including(
|
||||
|
|
@@ -228,7 +228,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::NotesImporter, feature_
|
|||
end
|
||||
|
||||
it 'imports the merge event' do
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNotesWorker).to receive(:perform_in).with(
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNoteWorker).to receive(:perform_in).with(
|
||||
anything,
|
||||
project.id,
|
||||
hash_including(
|
||||
|
|
@@ -260,7 +260,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::NotesImporter, feature_
|
|||
end
|
||||
|
||||
it 'imports the approved event' do
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNotesWorker).to receive(:perform_in).with(
|
||||
expect(Gitlab::BitbucketServerImport::ImportPullRequestNoteWorker).to receive(:perform_in).with(
|
||||
anything,
|
||||
project.id,
|
||||
hash_including(
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,210 @@
|
|||
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNoteImporter, feature_category: :importers do
  include AfterNextHelpers

  let_it_be_with_reload(:project) do
    create(:project, :repository, :import_started,
      import_data_attributes: {
        data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
        credentials: { 'token' => 'token' }
      }
    )
  end

  let_it_be(:merge_request_iid) { 7 }
  let_it_be(:object) do
    {
      iid: merge_request_iid,
      comment_type: 'merge_event',
      comment_id: 123,
      comment: {}
    }
  end

  def expect_log(stage:, message:, iid:)
    allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
    expect(Gitlab::BitbucketServerImport::Logger)
      .to receive(:info).with(include(import_stage: stage, message: message, iid: iid))
  end

  subject(:importer) { described_class.new(project.reload, object.to_hash) }

  describe '#execute' do
    shared_examples 'import is skipped' do
      it 'does not log and does not import notes' do
        expect(Gitlab::BitbucketServerImport::Logger)
          .not_to receive(:info).with(include(import_stage: 'import_pull_request_note', message: 'starting'))

        expect { importer.execute }.not_to change { Note.count }
      end
    end

    context 'when a matching merge request is not found' do
      it 'logs its progress' do
        expect_next(Gitlab::BitbucketServerImport::Importers::PullRequestNotes::BaseImporter).not_to receive(:execute)

        expect_log(stage: 'import_pull_request_note', message: 'starting', iid: merge_request_iid)
        expect_log(stage: 'import_pull_request_note', message: 'skipped', iid: merge_request_iid)
        expect_log(stage: 'import_pull_request_note', message: 'finished', iid: merge_request_iid)

        importer.execute
      end
    end

    context 'when a matching merge request is found' do
      let_it_be(:merge_request) { create(:merge_request, iid: merge_request_iid, source_project: project) }

      context 'when a matching importer is not found' do
        let_it_be(:object) do
          {
            iid: merge_request_iid,
            comment_type: 'unknown',
            comment_id: 123,
            comment: {}
          }
        end

        it 'logs its progress' do
          expect_next(Gitlab::BitbucketServerImport::Importers::PullRequestNotes::BaseImporter).not_to receive(:execute)

          expect_log(stage: 'import_pull_request_note', message: 'starting', iid: merge_request_iid)
          allow(Gitlab::BitbucketServerImport::Logger).to receive(:debug).and_call_original
          expect(Gitlab::BitbucketServerImport::Logger)
            .to receive(:debug).with(
              include(message: 'UNSUPPORTED_EVENT_TYPE', comment_type: 'unknown', comment_id: 123)
            )
          expect_log(stage: 'import_pull_request_note', message: 'finished', iid: merge_request_iid)

          importer.execute
        end
      end

      context 'when a matching importer is found' do
        context 'when comment type is merge_event' do
          let_it_be(:object) do
            {
              iid: merge_request_iid,
              comment_type: 'merge_event',
              comment_id: 123,
              comment: {}
            }
          end

          it 'imports the merge_event' do
            expect_next(
              Gitlab::BitbucketServerImport::Importers::PullRequestNotes::MergeEvent,
              project,
              merge_request
            ).to receive(:execute).with(object[:comment])

            expect_log(stage: 'import_pull_request_note', message: 'starting', iid: merge_request_iid)
            expect_log(stage: 'import_pull_request_note', message: 'finished', iid: merge_request_iid)

            importer.execute
          end
        end

        context 'when comment type is approved_event' do
          let_it_be(:object) do
            {
              iid: merge_request_iid,
              comment_type: 'approved_event',
              comment_id: 123,
              comment: {}
            }
          end

          it 'imports the approved_event' do
            expect_next(
              Gitlab::BitbucketServerImport::Importers::PullRequestNotes::ApprovedEvent,
              project,
              merge_request
            ).to receive(:execute).with(object[:comment])

            expect_log(stage: 'import_pull_request_note', message: 'starting', iid: merge_request_iid)
            expect_log(stage: 'import_pull_request_note', message: 'finished', iid: merge_request_iid)

            importer.execute
          end
        end

        context 'when comment type is inline' do
          let_it_be(:object) do
            {
              iid: merge_request_iid,
              comment_type: 'inline',
              comment_id: 123,
              comment: {}
            }
          end

          it 'imports the inline comment' do
            expect_next(
              Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inline,
              project,
              merge_request
            ).to receive(:execute).with(object[:comment])

            expect_log(stage: 'import_pull_request_note', message: 'starting', iid: merge_request_iid)
            expect_log(stage: 'import_pull_request_note', message: 'finished', iid: merge_request_iid)

            importer.execute
          end
        end

        context 'when comment type is standalone_notes' do
          let_it_be(:object) do
            {
              iid: merge_request_iid,
              comment_type: 'standalone_notes',
              comment_id: 123,
              comment: {}
            }
          end

          it 'imports the standalone_notes comment' do
            expect_next(
              Gitlab::BitbucketServerImport::Importers::PullRequestNotes::StandaloneNotes,
              project,
              merge_request
            ).to receive(:execute).with(object[:comment])

            expect_log(stage: 'import_pull_request_note', message: 'starting', iid: merge_request_iid)
            expect_log(stage: 'import_pull_request_note', message: 'finished', iid: merge_request_iid)

            importer.execute
          end
        end
      end
    end

    context 'when the project has been marked as failed' do
      before do
        project.import_state.mark_as_failed('error')
      end

      include_examples 'import is skipped'
    end

    context 'when the import data does not have credentials' do
      before do
        project.import_data.credentials = nil
        project.import_data.save!
      end

      include_examples 'import is skipped'
    end

    context 'when the import data does not have data' do
      before do
        project.import_data.data = nil
        project.import_data.save!
      end

      include_examples 'import is skipped'
    end
  end
end
@@ -0,0 +1,119 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::ApprovedEvent, feature_category: :importers do
  let_it_be(:project) do
    create(:project, :repository, :import_started,
      import_data_attributes: {
        data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
        credentials: { 'token' => 'token' }
      }
    )
  end

  let_it_be(:merge_request) { create(:merge_request, source_project: project) }
  let_it_be(:now) { Time.now.utc.change(usec: 0) }

  let!(:pull_request_author) do
    create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
  end

  let(:approved_event) do
    {
      id: 4,
      approver_username: pull_request_author.username,
      approver_email: pull_request_author.email,
      created_at: now
    }
  end

  def expect_log(stage:, message:, iid:, event_id:)
    allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
    expect(Gitlab::BitbucketServerImport::Logger)
      .to receive(:info).with(include(import_stage: stage, message: message, iid: iid, event_id: event_id))
  end

  subject(:importer) { described_class.new(project, merge_request) }

  describe '#execute', :clean_gitlab_redis_shared_state do
    it 'creates the approval, reviewer and approval note' do
      expect { importer.execute(approved_event) }
        .to change { merge_request.approvals.count }.from(0).to(1)
        .and change { merge_request.notes.count }.from(0).to(1)
        .and change { merge_request.reviewers.count }.from(0).to(1)

      approval = merge_request.approvals.first

      expect(approval.user).to eq(pull_request_author)
      expect(approval.created_at).to eq(now)

      note = merge_request.notes.first

      expect(note.note).to eq('approved this merge request')
      expect(note.author).to eq(pull_request_author)
      expect(note.system).to be_truthy
      expect(note.created_at).to eq(now)

      reviewer = merge_request.reviewers.first

      expect(reviewer.id).to eq(pull_request_author.id)
    end

    context 'when a user with a matching username does not exist' do
      before do
        pull_request_author.update!(username: 'another_username')
      end

      it 'finds the user based on email' do
        importer.execute(approved_event)

        approval = merge_request.approvals.first

        expect(approval.user).to eq(pull_request_author)
      end

      context 'when no users match email or username' do
        let_it_be(:another_author) { create(:user) }

        before do
          pull_request_author.destroy!
        end

        it 'does not set an approver' do
          expect_log(
            stage: 'import_approved_event',
            message: 'skipped due to missing user',
            iid: merge_request.iid,
            event_id: 4
          )

          expect { importer.execute(approved_event) }
            .to not_change { merge_request.approvals.count }
            .and not_change { merge_request.notes.count }
            .and not_change { merge_request.reviewers.count }

          expect(merge_request.approvals).to be_empty
        end
      end
    end

    context 'if the reviewer already existed' do
      before do
        merge_request.reviewers = [pull_request_author]
        merge_request.save!
      end

      it 'does not create the reviewer record' do
        expect { importer.execute(approved_event) }.not_to change { merge_request.reviewers.count }
      end
    end

    it 'logs its progress' do
      expect_log(stage: 'import_approved_event', message: 'starting', iid: merge_request.iid, event_id: 4)
      expect_log(stage: 'import_approved_event', message: 'finished', iid: merge_request.iid, event_id: 4)

      importer.execute(approved_event)
    end
  end
end
@@ -0,0 +1,22 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::BaseImporter, feature_category: :importers do
  let_it_be(:project) do
    build_stubbed(:project, :repository, :import_started,
      import_data_attributes: {
        data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
        credentials: { 'token' => 'token' }
      }
    )
  end

  let_it_be(:merge_request) { build_stubbed(:merge_request, source_project: project) }
  let_it_be(:importer_class) { Class.new(described_class) }
  let_it_be(:importer_instance) { importer_class.new(project, merge_request) }

  describe '#execute' do
    it { expect { importer_instance.execute({}) }.to raise_error(NotImplementedError) }
  end
end
@@ -0,0 +1,156 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inline, feature_category: :importers do
  let_it_be(:project) do
    create(:project, :repository, :import_started,
      import_data_attributes: {
        data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
        credentials: { 'token' => 'token' }
      }
    )
  end

  let_it_be(:merge_request) { create(:merge_request, source_project: project) }
  let_it_be(:now) { Time.now.utc.change(usec: 0) }
  let_it_be(:mentions_converter) { Gitlab::Import::MentionsConverter.new('bitbucket_server', project) }
  let_it_be(:reply_author) { create(:user, username: 'reply_author', email: 'reply_author@example.org') }
  let_it_be(:inline_note_author) do
    create(:user, username: 'inline_note_author', email: 'inline_note_author@example.org')
  end

  let(:reply) do
    {
      author_email: reply_author.email,
      author_username: reply_author.username,
      note: 'I agree',
      created_at: now,
      updated_at: now,
      parent_comment_note: nil
    }
  end

  let(:pr_inline_comment) do
    {
      id: 7,
      file_type: 'ADDED',
      from_sha: 'c5f4288162e2e6218180779c7f6ac1735bb56eab',
      to_sha: 'a4c2164330f2549f67c13f36a93884cf66e976be',
      file_path: '.gitmodules',
      old_pos: nil,
      new_pos: 4,
      note: 'Hello world',
      author_email: inline_note_author.email,
      author_username: inline_note_author.username,
      comments: [reply],
      created_at: now,
      updated_at: now,
      parent_comment_note: nil
    }
  end

  before do
    allow(Gitlab::Import::MentionsConverter).to receive(:new).and_return(mentions_converter)
  end

  def expect_log(stage:, message:, iid:, comment_id:)
    allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
    expect(Gitlab::BitbucketServerImport::Logger)
      .to receive(:info).with(include(import_stage: stage, message: message, iid: iid, comment_id: comment_id))
  end

  subject(:importer) { described_class.new(project, merge_request) }

  describe '#execute' do
    it 'imports the threaded discussion' do
      expect(mentions_converter).to receive(:convert).and_call_original.twice

      expect { importer.execute(pr_inline_comment) }.to change { Note.count }.by(2)

      expect(merge_request.discussions.count).to eq(1)

      notes = merge_request.notes.order(:id).to_a
      start_note = notes.first
      expect(start_note.type).to eq('DiffNote')
      expect(start_note.note).to end_with(pr_inline_comment[:note])
      expect(start_note.created_at).to eq(pr_inline_comment[:created_at])
      expect(start_note.updated_at).to eq(pr_inline_comment[:updated_at])
      expect(start_note.position.old_line).to be_nil
      expect(start_note.position.new_line).to eq(pr_inline_comment[:new_pos])
      expect(start_note.author).to eq(inline_note_author)

      reply_note = notes.last
      expect(reply_note.note).to eq(reply[:note])
      expect(reply_note.author).to eq(reply_author)
      expect(reply_note.created_at).to eq(reply[:created_at])
      expect(reply_note.updated_at).to eq(reply[:created_at])
      expect(reply_note.position.old_line).to be_nil
      expect(reply_note.position.new_line).to eq(pr_inline_comment[:new_pos])
    end

    it 'logs its progress' do
      expect_log(stage: 'import_inline_comments', message: 'starting', iid: merge_request.iid, comment_id: 7)
      expect_log(stage: 'import_inline_comments', message: 'finished', iid: merge_request.iid, comment_id: 7)

      importer.execute(pr_inline_comment)
    end

    context 'when note is invalid' do
      let(:invalid_comment) do
        {
          id: 7,
          file_type: 'ADDED',
          from_sha: 'c5f4288162e2e6218180779c7f6ac1735bb56eab',
          to_sha: 'a4c2164330f2549f67c13f36a93884cf66e976be',
          file_path: '.gitmodules',
          old_pos: 3,
          new_pos: 4,
          note: '',
          author_email: inline_note_author.email,
          author_username: inline_note_author.username,
          comments: [],
          created_at: now,
          updated_at: now,
          parent_comment_note: nil
        }
      end

      it 'falls back to a basic note' do
        expect(mentions_converter).to receive(:convert).and_call_original.twice

        expect { importer.execute(invalid_comment) }.to change { Note.count }.by(1)

        expect(merge_request.discussions.count).to eq(1)

        notes = merge_request.notes.order(:id).to_a
        start_note = notes.first
        expect(start_note.note).to start_with("*Comment on .gitmodules:3 --> .gitmodules:4*")
        expect(start_note.created_at).to eq(invalid_comment[:created_at])
        expect(start_note.updated_at).to eq(invalid_comment[:updated_at])
      end

      it 'logs its fallback' do
        expect(mentions_converter).to receive(:convert).and_call_original.twice
        expect_log(
          stage: 'create_diff_note',
          message: 'creating standalone fallback for DiffNote',
          iid: merge_request.iid,
          comment_id: 7
        )

        importer.execute(invalid_comment)
      end
    end

    context 'when converting mentions fails' do
      it 'logs its exception' do
        expect(mentions_converter).to receive(:convert).and_raise(StandardError)
        expect(Gitlab::ErrorTracking).to receive(:log_exception)
          .with(StandardError, include(import_stage: 'create_diff_note'))

        importer.execute(pr_inline_comment)
      end
    end
  end
end
@@ -0,0 +1,57 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::MergeEvent, feature_category: :importers do
  let_it_be(:project) do
    create(:project, :repository, :import_started,
      import_data_attributes: {
        data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
        credentials: { 'token' => 'token' }
      }
    )
  end

  let_it_be(:merge_request) { create(:merge_request, source_project: project) }
  let_it_be(:now) { Time.now.utc.change(usec: 0) }

  let_it_be(:pull_request_author) do
    create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
  end

  let_it_be(:merge_event) do
    {
      id: 3,
      committer_email: pull_request_author.email,
      merge_timestamp: now,
      merge_commit: '12345678'
    }
  end

  def expect_log(stage:, message:, iid:, event_id:)
    allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
    expect(Gitlab::BitbucketServerImport::Logger)
      .to receive(:info).with(include(import_stage: stage, message: message, iid: iid, event_id: event_id))
  end

  subject(:importer) { described_class.new(project, merge_request) }

  describe '#execute' do
    it 'imports the merge event' do
      importer.execute(merge_event)

      merge_request.reload

      expect(merge_request.metrics.merged_by).to eq(pull_request_author)
      expect(merge_request.metrics.merged_at).to eq(merge_event[:merge_timestamp])
      expect(merge_request.merge_commit_sha).to eq(merge_event[:merge_commit])
    end

    it 'logs its progress' do
      expect_log(stage: 'import_merge_event', message: 'starting', iid: merge_request.iid, event_id: 3)
      expect_log(stage: 'import_merge_event', message: 'finished', iid: merge_request.iid, event_id: 3)

      importer.execute(merge_event)
    end
  end
end
@@ -0,0 +1,174 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::StandaloneNotes, feature_category: :importers do
  let_it_be(:project) do
    create(:project, :repository, :import_started,
      import_data_attributes: {
        data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
        credentials: { 'token' => 'token' }
      }
    )
  end

  let_it_be(:merge_request) { create(:merge_request, source_project: project) }
  let_it_be(:now) { Time.now.utc.change(usec: 0) }
  let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
  let_it_be(:mentions_converter) { Gitlab::Import::MentionsConverter.new('bitbucket_server', project) }

  let(:pr_comment) do
    {
      id: 5,
      note: 'Hello world',
      author_email: note_author.email,
      author_username: note_author.username,
      comments: [],
      created_at: now,
      updated_at: now,
      parent_comment_note: nil
    }
  end

  before do
    allow(Gitlab::Import::MentionsConverter).to receive(:new).and_return(mentions_converter)
  end

  def expect_log(stage:, message:, iid:, comment_id:)
    allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
    expect(Gitlab::BitbucketServerImport::Logger)
      .to receive(:info).with(include(import_stage: stage, message: message, iid: iid, comment_id: comment_id))
  end

  subject(:importer) { described_class.new(project, merge_request) }

  describe '#execute' do
    it 'imports the standalone comments' do
      expect(mentions_converter).to receive(:convert).and_call_original

      expect { importer.execute(pr_comment) }.to change { Note.count }.by(1)

      expect(merge_request.notes.count).to eq(1)
      expect(merge_request.notes.first).to have_attributes(
        note: end_with(pr_comment[:note]),
        author: note_author,
        created_at: pr_comment[:created_at],
        updated_at: pr_comment[:created_at]
      )
    end

    context 'when the note has multiple comments' do
      let(:pr_comment_extra) do
        {
          id: 6,
          note: 'Foo bar',
          author_email: note_author.email,
          author_username: note_author.username,
          comments: [],
          created_at: now,
          updated_at: now,
          parent_comment_note: nil
        }
      end

      let(:pr_comment) do
        {
          id: 5,
          note: 'Hello world',
          author_email: note_author.email,
          author_username: note_author.username,
          comments: [pr_comment_extra],
          created_at: now,
          updated_at: now,
          parent_comment_note: nil
        }
      end

      it 'imports multiple comments' do
        expect(mentions_converter).to receive(:convert).and_call_original.twice

        expect { importer.execute(pr_comment) }.to change { Note.count }.by(2)

        expect(merge_request.notes.count).to eq(2)
        expect(merge_request.notes.first).to have_attributes(
          note: end_with(pr_comment[:note]),
          author: note_author,
          created_at: pr_comment[:created_at],
          updated_at: pr_comment[:created_at]
        )
        expect(merge_request.notes.last).to have_attributes(
          note: end_with(pr_comment_extra[:note]),
          author: note_author,
          created_at: pr_comment_extra[:created_at],
          updated_at: pr_comment_extra[:created_at]
        )
      end
    end

    context 'when the author is not found' do
      before do
        allow_next_instance_of(Gitlab::BitbucketServerImport::UserFinder) do |user_finder|
          allow(user_finder).to receive(:uid).and_return(nil)
        end
      end

      it 'adds a note with the author username and email' do
        importer.execute(pr_comment)

        expect(Note.first.note).to include("*By #{note_author.username} (#{note_author.email})")
      end
    end

    context 'when the note has a parent note' do
      let(:pr_comment) do
        {
          id: 5,
          note: 'Note',
          author_email: note_author.email,
          author_username: note_author.username,
          comments: [],
          created_at: now,
          updated_at: now,
          parent_comment_note: 'Parent note'
        }
      end

      it 'adds the parent note before the actual note' do
        importer.execute(pr_comment)

        expect(Note.first.note).to include("> #{pr_comment[:parent_comment_note]}\n\n")
      end

      it 'logs its progress' do
        expect_log(
          stage: 'import_standalone_notes_comments',
          message: 'starting',
          iid: merge_request.iid,
          comment_id: 5
        )
        expect_log(
          stage: 'import_standalone_notes_comments',
          message: 'finished',
          iid: merge_request.iid,
          comment_id: 5
        )

        importer.execute(pr_comment)
      end
    end

    context 'when saving notes fails' do
      before do
        allow(merge_request.notes).to receive(:create!).and_raise(StandardError)
      end

      it 'logs its exception' do
        expect(mentions_converter).to receive(:convert).and_call_original
        expect(Gitlab::ErrorTracking).to receive(:log_exception)
          .with(StandardError, include(import_stage: 'import_standalone_notes_comments'))

        importer.execute(pr_comment)
      end
    end
  end
end
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
  let_it_be_with_reload(:project) do
    create(:project, :repository, :import_started,
      import_data_attributes: {
        data: { 'project_key' => 'key', 'repo_slug' => 'slug', 'bitbucket_server_notes_separate_worker' => false },
        data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
        credentials: { 'token' => 'token' }
      }
    )
@@ -16,7 +16,6 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte

  let_it_be(:pull_request_data) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
  let_it_be(:pull_request) { BitbucketServer::Representation::PullRequest.new(pull_request_data) }
  let_it_be(:object) { pull_request }
  let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
  let(:mentions_converter) { Gitlab::Import::MentionsConverter.new('bitbucket_server', project) }

@@ -85,7 +84,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
    allow(Gitlab::Import::MentionsConverter).to receive(:new).and_return(mentions_converter)
  end

  subject(:importer) { described_class.new(project.reload, object.to_hash) }
  subject(:importer) { described_class.new(project.reload, pull_request.to_hash) }

  describe '#execute' do
    context 'when a matching merge request is not found' do
@@ -113,7 +112,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
        importer.execute
      end

    shared_examples 'import standalone comment' do
      context 'when PR has comments' do
        before do
          allow_next(BitbucketServer::Client).to receive(:activities).and_return([pr_comment])
        end
@@ -200,7 +199,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
      end
    end

    shared_examples 'import inline comment' do
      context 'when PR has threaded discussion' do
        let_it_be(:reply_author) { create(:user, username: 'reply_author', email: 'reply_author@example.org') }
        let_it_be(:inline_note_author) do
          create(:user, username: 'inline_note_author', email: 'inline_note_author@example.org')
@@ -294,7 +293,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
      end
    end

    shared_examples 'import merge event' do
      context 'when PR has a merge event' do
        before do
          allow_next(BitbucketServer::Client).to receive(:activities).and_return([merge_event])
        end
@@ -310,7 +309,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
      end
    end

    shared_examples 'import approved event' do
      context 'when PR has an approved event' do
        before do
          allow_next(BitbucketServer::Client).to receive(:activities).and_return([approved_event])
        end
@@ -380,126 +379,6 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
        end
      end
    end

    context 'when PR has comments' do
      include_examples 'import standalone comment'
    end

    context 'when PR has threaded discussion' do
      include_examples 'import inline comment'
    end

    context 'when PR has a merge event' do
      include_examples 'import merge event'
    end

    context 'when PR has an approved event' do
      include_examples 'import approved event'
    end

    context 'when bitbucket_server_notes_separate_worker is true', :clean_gitlab_redis_shared_state do
      let_it_be_with_reload(:project) do
        create(:project, :repository, :import_started,
          import_data_attributes: {
            data: { 'project_key' => 'key', 'repo_slug' => 'slug', 'bitbucket_server_notes_separate_worker' => true },
            credentials: { 'token' => 'token' }
          }
        )
      end

      let_it_be(:merge_request) { create(:merge_request, iid: pull_request.iid, source_project: project) }

      context 'when comment type is merge_event' do
        let(:object) do
          {
            iid: pull_request.iid,
            comment_type: 'merge_event',
            comment_id: 123,
            comment: {
              committer_email: pull_request_author.email,
              merge_timestamp: now,
              merge_commit: '12345678'
            }
          }
        end

        include_examples 'import merge event'
      end

      context 'when comment type is approved_event' do
        let(:object) do
          {
            iid: pull_request.iid,
            comment_type: 'approved_event',
            comment_id: 123,
            comment: {
              approver_username: pull_request_author.username,
              approver_email: pull_request_author.email,
              created_at: now
            }
          }
        end

        include_examples 'import approved event'
      end

      context 'when comment type is inline' do
        let(:object) do
          {
            iid: pull_request.iid,
            comment_type: 'inline',
            comment_id: 123,
            comment: {
              file_type: 'ADDED',
              from_sha: pull_request.target_branch_sha,
              to_sha: pull_request.source_branch_sha,
              file_path: '.gitmodules',
              old_pos: nil,
              new_pos: 4,
              note: 'Hello world',
              author_email: inline_note_author.email,
              author_username: inline_note_author.username,
              comments: [
                author_email: reply_author.email,
                author_username: reply_author.username,
                note: 'I agree',
                created_at: now,
                updated_at: now,
                parent_comment: nil
              ],
              created_at: now,
              updated_at: now,
              parent_comment: nil
            }
          }
        end

        include_examples 'import inline comment'
      end

      context 'when comment type is standalone_pr' do
        let_it_be(:object) do
          {
            iid: pull_request.iid,
            comment_type: 'standalone_notes',
            comment_id: 123,
            comment: {
              note: 'Hello world',
              author_email: note_author.email,
              author_username: note_author.username,
              comments: [],
              created_at: now,
              updated_at: now,
              parent_comment: {
                note: 'Parent note'
              }
            }
          }
        end

        include_examples 'import standalone comment'
      end
    end
  end

  shared_examples 'import is skipped' do
@@ -0,0 +1,103 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'Destroying a model version', feature_category: :mlops do
  using RSpec::Parameterized::TableSyntax

  include GraphqlHelpers

  let_it_be_with_reload(:model_version) { create(:ml_model_versions) }
  let_it_be(:user) { create(:user) }

  let(:project) { model_version.project }
  let(:id) { model_version.to_global_id.to_s }

  let(:query) do
    <<~GQL
      modelVersion {
        id
      }
      errors
    GQL
  end

  let(:params) { { id: id } }
  let(:mutation) { graphql_mutation(:ml_model_version_delete, params, query) }
  let(:mutation_response) { graphql_mutation_response(:ml_model_version_delete) }

  shared_examples 'destroying the model' do
    it 'destroys the model' do
      expect(::Ml::DestroyModelVersionService).to receive(:new).with(model_version, user).and_call_original

      expect { mutation_request }.to change { ::Ml::ModelVersion.count }.by(-1)
      expect(mutation_response['modelVersion']).to eq({ "id" => GitlabSchema.id_from_object(model_version).to_s })
    end

    it_behaves_like 'returning response status', :success
  end

  shared_examples 'model version was not found' do
    it 'does not delete the model' do
      expect(::Ml::DestroyModelVersionService).not_to receive(:new)

      expect { mutation_request }.to not_change { ::Ml::ModelVersion.count }

      expect(mutation_response["errors"]).to match_array(['Model version not found'])
    end

    it_behaves_like 'returning response status', :success
  end

  describe 'post graphql mutation' do
    subject(:mutation_request) { post_graphql_mutation(mutation, current_user: user) }

    context 'with valid id' do
      where(:user_role, :mutation_behavior) do
        :maintainer | 'destroying the model'
        :developer  | 'destroying the model'
        :reporter   | 'destroying the model'
        :guest      | 'a mutation that returns a top-level access error'
        :anonymous  | 'a mutation that returns a top-level access error'
      end

      with_them do
        let(:current_user) { user }

        before do
          project.public_send("add_#{user_role}", user) unless user_role == :anonymous
        end

        it_behaves_like params[:mutation_behavior]
      end
    end

    context 'with authorized user' do
      before do
        project.add_maintainer(user)
      end

      context 'with invalid id' do
        let(:params) do
          { id: "gid://gitlab/Ml::ModelVersion/#{non_existing_record_id}" }
        end

        it_behaves_like 'model version was not found'
      end

      context 'when an error occurs' do
        it 'returns the errors in the response' do
          allow_next_found_instance_of(::Ml::ModelVersion) do |model|
            allow(model).to receive(:destroy).and_return(nil)
            errors = ActiveModel::Errors.new(model).tap { |e| e.add(:id, 'some error') }
            allow(model).to receive(:errors).and_return(errors)
          end

          mutation_request

          expect(mutation_response['errors']).to match_array(['Id some error'])
        end
      end
    end
  end
end
@@ -0,0 +1,43 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ml::DestroyModelVersionService, feature_category: :mlops do
  let_it_be(:project) { create(:project) }
  let_it_be(:model) { create(:ml_models, project: project) }

  let(:user) { project.owner }

  subject(:execute_service) { described_class.new(model_version, user).execute }

  describe '#execute' do
    context 'when model version exists' do
      let(:model_version) { create(:ml_model_versions, :with_package, model: model) }

      it 'deletes the model version', :aggregate_failures do
        expect(execute_service).to be_success
        expect(execute_service.payload[:model_version]).to eq(model_version)
        expect(Ml::ModelVersion.find_by(id: model_version.id)).to be_nil
      end
    end

    context 'when model version has no package' do
      let(:model_version) { create(:ml_model_versions, model: model) }

      it 'does not trigger destroy package service', :aggregate_failures do
        expect(Packages::MarkPackageForDestructionService).not_to receive(:new)
        expect(execute_service).to be_success
      end
    end

    context 'when package cannot be marked for destruction' do
      let(:model_version) { create(:ml_model_versions, :with_package, model: model) }
      let(:user) { nil }

      it 'does not delete the model version', :aggregate_failures do
        is_expected.to be_error.and have_attributes(message: "You don't have access to this package")
        expect(Ml::ModelVersion.find_by(id: model_version.id)).to eq(model_version)
      end
    end
  end
end
@@ -1,55 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ml::ModelVersions::DeleteService, feature_category: :mlops do
  let_it_be(:valid_model_version) do
    create(:ml_model_versions, :with_package)
  end

  let(:project) { valid_model_version.project }
  let(:user) { valid_model_version.project.owner }
  let(:name) { valid_model_version.name }
  let(:version) { valid_model_version.version }

  subject(:execute_service) { described_class.new(project, name, version, user).execute }

  describe '#execute' do
    context 'when model version exists' do
      it 'deletes the model version', :aggregate_failures do
        expect(execute_service).to be_success
        expect(Ml::ModelVersion.find_by(id: valid_model_version.id)).to be_nil
      end
    end

    context 'when model version does not exist' do
      let(:version) { 'wrong-version' }

      it { is_expected.to be_error.and have_attributes(message: 'Model not found') }
    end

    context 'when model version has no package' do
      before do
        valid_model_version.update!(package: nil)
      end

      it 'does not trigger destroy package service', :aggregate_failures do
        expect(Packages::MarkPackageForDestructionService).not_to receive(:new)
        expect(execute_service).to be_success
      end
    end

    context 'when package cannot be marked for destruction' do
      before do
        allow_next_instance_of(Packages::MarkPackageForDestructionService) do |service|
          allow(service).to receive(:execute).and_return(ServiceResponse.error(message: 'error'))
        end
      end

      it 'does not delete the model version', :aggregate_failures do
        is_expected.to be_error.and have_attributes(message: 'error')
        expect(Ml::ModelVersion.find_by(id: valid_model_version.id)).to eq(valid_model_version)
      end
    end
  end
end
@@ -1,8 +1,5 @@
---
# It maps table_name to {index1: array_of_duplicate_indexes, index2: array_of_duplicate_indexes, ... }
abuse_reports:
  idx_abuse_reports_user_id_status_and_category:
    - index_abuse_reports_on_user_id
alert_management_http_integrations:
  index_http_integrations_on_project_and_endpoint:
    - index_alert_management_http_integrations_on_project_id
@@ -77,12 +77,18 @@ RSpec.shared_examples 'extracts ref method' do
    expect(ref_extractor.extract_ref('v2.0.0')).to eq(['v2.0.0', ''])
  end

  it 'extracts a valid commit ref' do
  it 'extracts a valid commit SHA1 ref without a path' do
    expect(ref_extractor.extract_ref('f4b14494ef6abf3d144c28e4af0c20143383e062')).to eq(
      ['f4b14494ef6abf3d144c28e4af0c20143383e062', '']
    )
  end

  it 'extracts a valid commit SHA256 ref without a path' do
    expect(ref_extractor.extract_ref('34627760127d5ff2a644771225af09bbd79f28a54a0a4c03c1881bf2c26dc13c')).to eq(
      ['34627760127d5ff2a644771225af09bbd79f28a54a0a4c03c1881bf2c26dc13c', '']
    )
  end

  it 'falls back to a primitive split for an invalid ref' do
    expect(ref_extractor.extract_ref('stable')).to eq(['stable', ''])
  end
@@ -0,0 +1,9 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BitbucketServerImport::ImportPullRequestNoteWorker, feature_category: :importers do
  subject(:worker) { described_class.new }

  it_behaves_like Gitlab::BitbucketServerImport::ObjectImporter
end