Add latest changes from gitlab-org/gitlab@master

parent d5891ad110
commit 55583893ca
@@ -743,6 +743,7 @@ gem 'telesignenterprise', '~> 2.2', feature_category: :insider_threat
# BufferedIO patch
# Updating this version will require updating scripts/allowed_warnings.txt
gem 'net-protocol', '~> 0.1.3', feature_category: :shared
gem "nkf", "~> 0.2.0", feature_category: :shared

# This is locked to 0.6.0 because we patch Net::HTTP#connect in
# gems/gitlab-http/lib/net_http/connect_patch.rb.
@ -429,6 +429,8 @@
|
|||
{"name":"netrc","version":"0.11.0","platform":"ruby","checksum":"de1ce33da8c99ab1d97871726cba75151113f117146becbe45aa85cb3dabee3f"},
|
||||
{"name":"nio4r","version":"2.7.0","platform":"java","checksum":"3f2e515e928ceeef7668e1f64fc3bfef1417a5ec0908d8e69f2c6d486284e04d"},
|
||||
{"name":"nio4r","version":"2.7.0","platform":"ruby","checksum":"9586a685eca8246d6406e712a525e705d15bb88f709d78fc3f141e864df97276"},
|
||||
{"name":"nkf","version":"0.2.0","platform":"java","checksum":"3e6f022d1743a863bf05e936c7c2110be07ba1c593ea974df75d89e8bf7cc967"},
|
||||
{"name":"nkf","version":"0.2.0","platform":"ruby","checksum":"fbc151bda025451f627fafdfcb3f4f13d0b22ae11f58c6d3a2939c76c5f5f126"},
|
||||
{"name":"no_proxy_fix","version":"0.1.2","platform":"ruby","checksum":"4e9b4c31bb146de7fcf347dc1087bb13ac2039b56d50aa019e61036256abcd00"},
|
||||
{"name":"nokogiri","version":"1.18.2","platform":"aarch64-linux-gnu","checksum":"74e0f9a7487a30a2957f46c5113d58f836436b033c9906e0bc6fee9d8cdafabf"},
|
||||
{"name":"nokogiri","version":"1.18.2","platform":"aarch64-linux-musl","checksum":"99bcea596a80eaee99f2bae2596275641ea688262c1da32b4e90db6159e86477"},
|
||||
|
|
|
|||
|
|
@ -1200,6 +1200,7 @@ GEM
|
|||
net-ssh (7.3.0)
|
||||
netrc (0.11.0)
|
||||
nio4r (2.7.0)
|
||||
nkf (0.2.0)
|
||||
no_proxy_fix (0.1.2)
|
||||
nokogiri (1.18.2)
|
||||
mini_portile2 (~> 2.8.2)
|
||||
|
|
@ -2188,6 +2189,7 @@ DEPENDENCIES
|
|||
net-ldap (~> 0.17.1)
|
||||
net-ntp
|
||||
net-protocol (~> 0.1.3)
|
||||
nkf (~> 0.2.0)
|
||||
nokogiri (~> 1.18)
|
||||
oauth2 (~> 2.0)
|
||||
octokit (~> 9.0)
|
||||
|
|
|
|||
|
|
@ -432,6 +432,8 @@
|
|||
{"name":"netrc","version":"0.11.0","platform":"ruby","checksum":"de1ce33da8c99ab1d97871726cba75151113f117146becbe45aa85cb3dabee3f"},
|
||||
{"name":"nio4r","version":"2.7.0","platform":"java","checksum":"3f2e515e928ceeef7668e1f64fc3bfef1417a5ec0908d8e69f2c6d486284e04d"},
|
||||
{"name":"nio4r","version":"2.7.0","platform":"ruby","checksum":"9586a685eca8246d6406e712a525e705d15bb88f709d78fc3f141e864df97276"},
|
||||
{"name":"nkf","version":"0.2.0","platform":"java","checksum":"3e6f022d1743a863bf05e936c7c2110be07ba1c593ea974df75d89e8bf7cc967"},
|
||||
{"name":"nkf","version":"0.2.0","platform":"ruby","checksum":"fbc151bda025451f627fafdfcb3f4f13d0b22ae11f58c6d3a2939c76c5f5f126"},
|
||||
{"name":"no_proxy_fix","version":"0.1.2","platform":"ruby","checksum":"4e9b4c31bb146de7fcf347dc1087bb13ac2039b56d50aa019e61036256abcd00"},
|
||||
{"name":"nokogiri","version":"1.18.2","platform":"aarch64-linux-gnu","checksum":"74e0f9a7487a30a2957f46c5113d58f836436b033c9906e0bc6fee9d8cdafabf"},
|
||||
{"name":"nokogiri","version":"1.18.2","platform":"aarch64-linux-musl","checksum":"99bcea596a80eaee99f2bae2596275641ea688262c1da32b4e90db6159e86477"},
|
||||
|
|
|
|||
|
|
@ -1217,6 +1217,7 @@ GEM
|
|||
net-ssh (7.3.0)
|
||||
netrc (0.11.0)
|
||||
nio4r (2.7.0)
|
||||
nkf (0.2.0)
|
||||
no_proxy_fix (0.1.2)
|
||||
nokogiri (1.18.2)
|
||||
mini_portile2 (~> 2.8.2)
|
||||
|
|
@ -2223,6 +2224,7 @@ DEPENDENCIES
|
|||
net-ldap (~> 0.17.1)
|
||||
net-ntp
|
||||
net-protocol (~> 0.1.3)
|
||||
nkf (~> 0.2.0)
|
||||
nokogiri (~> 1.18)
|
||||
oauth2 (~> 2.0)
|
||||
octokit (~> 9.0)
|
||||
|
|
|
|||
|
|
@ -74,6 +74,10 @@ export default {
|
|||
required: false,
|
||||
default: null,
|
||||
},
|
||||
canAdminRunners: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
data() {
|
||||
return {
|
||||
|
|
@ -209,10 +213,11 @@ export default {
|
|||
<template #title>{{ s__('Runners|Runners') }}</template>
|
||||
<template #actions>
|
||||
<runner-dashboard-link />
|
||||
<gl-button :href="newRunnerPath" variant="confirm">
|
||||
<gl-button v-if="canAdminRunners" :href="newRunnerPath" variant="confirm">
|
||||
{{ s__('Runners|New instance runner') }}
|
||||
</gl-button>
|
||||
<registration-dropdown
|
||||
v-if="canAdminRunners"
|
||||
:allow-registration-token="allowRegistrationToken"
|
||||
:registration-token="registrationToken"
|
||||
:type="$options.INSTANCE_TYPE"
|
||||
|
|
@ -245,7 +250,7 @@ export default {
|
|||
<runner-list
|
||||
:runners="runners.items"
|
||||
:loading="runnersLoading"
|
||||
:checkable="true"
|
||||
:checkable="canAdminRunners"
|
||||
@deleted="onDeleted"
|
||||
>
|
||||
<template #runner-job-status-badge="{ runner }">
|
||||
|
|
|
|||
|
|
@ -33,8 +33,13 @@ export const initAdminRunners = (selector = '#js-admin-runners') => {
|
|||
return null;
|
||||
}
|
||||
|
||||
const { newRunnerPath, allowRegistrationToken, registrationToken, tagSuggestionsPath } =
|
||||
el.dataset;
|
||||
const {
|
||||
newRunnerPath,
|
||||
allowRegistrationToken,
|
||||
registrationToken,
|
||||
tagSuggestionsPath,
|
||||
canAdminRunners,
|
||||
} = el.dataset;
|
||||
const { cacheConfig, typeDefs, localMutations } = createLocalState();
|
||||
|
||||
const apolloProvider = new VueApollo({
|
||||
|
|
@ -55,6 +60,7 @@ export const initAdminRunners = (selector = '#js-admin-runners') => {
|
|||
newRunnerPath,
|
||||
allowRegistrationToken: parseBoolean(allowRegistrationToken),
|
||||
registrationToken,
|
||||
canAdminRunners: parseBoolean(canAdminRunners),
|
||||
},
|
||||
});
|
||||
},
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ import * as Sentry from '~/sentry/sentry_browser_wrapper';
|
|||
import { createAlert } from '~/alert';
|
||||
import { s__ } from '~/locale';
|
||||
import Tracking from '~/tracking';
|
||||
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
|
||||
import {
|
||||
DEFAULT_PAGE_SIZE,
|
||||
INSTRUMENT_TAB_LABELS,
|
||||
|
|
@ -46,7 +47,7 @@ export default {
|
|||
directives: {
|
||||
GlTooltip: GlTooltipDirective,
|
||||
},
|
||||
mixins: [Tracking.mixin()],
|
||||
mixins: [Tracking.mixin(), glFeatureFlagMixin()],
|
||||
provide() {
|
||||
return {
|
||||
currentTab: computed(() => this.currentTab),
|
||||
|
|
@ -138,6 +139,8 @@ export default {
|
|||
return !this.isLoading && this.todos.length === 0;
|
||||
},
|
||||
showMarkAllAsDone() {
|
||||
if (this.glFeatures.todosBulkActions) return false;
|
||||
|
||||
return this.currentTab === TABS_INDICES.pending && !this.showEmptyState;
|
||||
},
|
||||
},
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
<script>
|
||||
import { GlSprintf, GlSkeletonLoader } from '@gitlab/ui';
|
||||
import { n__ } from '~/locale';
|
||||
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
|
||||
import StateContainer from '../state_container.vue';
|
||||
|
||||
|
|
@ -26,6 +27,8 @@ export default {
|
|||
data() {
|
||||
return {
|
||||
collapsed: this.glFeatures.mrReportsTab,
|
||||
findingsCount: 0,
|
||||
loadedCount: 0,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
|
|
@ -55,15 +58,23 @@ export default {
|
|||
].filter((w) => w);
|
||||
},
|
||||
collapsedSummaryText() {
|
||||
return null;
|
||||
return n__('%d findings', '%d findings', this.findingsCount);
|
||||
},
|
||||
statusIcon() {
|
||||
return 'success';
|
||||
if (this.loadedCount < this.widgets.length) return 'loading';
|
||||
|
||||
return 'warning';
|
||||
},
|
||||
isLoadingSummary() {
|
||||
return false;
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
onLoadedReport(findings) {
|
||||
this.findingsCount += findings;
|
||||
this.loadedCount += 1;
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
|
||||
|
|
@ -122,6 +133,7 @@ export default {
|
|||
:mr="mr"
|
||||
class="mr-widget-section"
|
||||
:class="{ 'gl-border-t gl-border-t-section': index > 0 }"
|
||||
@loaded="onLoadedReport"
|
||||
/>
|
||||
</div>
|
||||
</section>
|
||||
|
|
|
|||
|
|
@ -64,6 +64,7 @@ export default {
|
|||
return axios.get(this.mr.accessibilityReportPath).then((response) => {
|
||||
this.collapsedData = response.data;
|
||||
this.content = this.getContent(response.data);
|
||||
this.$emit('loaded', this.collapsedData?.summary?.errored || 0);
|
||||
|
||||
return response;
|
||||
});
|
||||
|
|
|
|||
|
|
@ -132,6 +132,7 @@ export default {
|
|||
}
|
||||
if (data) {
|
||||
this.collapsedData = data;
|
||||
this.$emit('loaded', this.collapsedData.new_errors.length);
|
||||
}
|
||||
return {
|
||||
headers,
|
||||
|
|
|
|||
|
|
@ -57,6 +57,8 @@ export default {
|
|||
});
|
||||
});
|
||||
|
||||
this.$emit('loaded', 0);
|
||||
|
||||
return artifacts;
|
||||
},
|
||||
error() {
|
||||
|
|
|
|||
|
|
@ -93,6 +93,8 @@ export default {
|
|||
this.terraformData.collapsed = formattedData;
|
||||
this.terraformData.expanded = [...valid, ...invalid];
|
||||
|
||||
this.$emit('loaded', this.terraformData.collapsed.invalid.length);
|
||||
|
||||
return {
|
||||
...res,
|
||||
data: formattedData,
|
||||
|
|
|
|||
|
|
@ -151,6 +151,7 @@ export default {
|
|||
},
|
||||
};
|
||||
this.suites = this.prepareSuites(this.collapsedData);
|
||||
this.$emit('loaded', summary.failed || 0);
|
||||
|
||||
return response;
|
||||
});
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ import {
|
|||
LINKED_CATEGORIES_MAP,
|
||||
WORK_ITEM_TYPE_VALUE_INCIDENT,
|
||||
WORK_ITEM_TYPE_VALUE_ISSUE,
|
||||
WORK_ITEM_TYPE_ENUM_INCIDENT,
|
||||
} from '~/work_items/constants';
|
||||
import {
|
||||
isAssigneesWidget,
|
||||
|
|
@ -119,7 +120,10 @@ export default {
|
|||
);
|
||||
},
|
||||
isIncident() {
|
||||
return this.issuable.workItemType?.name === WORK_ITEM_TYPE_VALUE_INCIDENT;
|
||||
return (
|
||||
this.issuable.workItemType?.name === WORK_ITEM_TYPE_VALUE_INCIDENT ||
|
||||
this.issuable.type === WORK_ITEM_TYPE_ENUM_INCIDENT
|
||||
);
|
||||
},
|
||||
isServiceDeskIssue() {
|
||||
return (
|
||||
|
|
|
|||
|
|
@ -2,17 +2,13 @@
|
|||
import { GlModal, GlFormGroup, GlFormSelect, GlAlert } from '@gitlab/ui';
|
||||
import { differenceBy } from 'lodash';
|
||||
import * as Sentry from '~/sentry/sentry_browser_wrapper';
|
||||
import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
|
||||
import { __, s__, sprintf } from '~/locale';
|
||||
import { findDesignWidget } from '~/work_items/utils';
|
||||
import { capitalizeFirstCharacter } from '~/lib/utils/text_utility';
|
||||
|
||||
import {
|
||||
WIDGET_TYPE_HIERARCHY,
|
||||
WORK_ITEMS_TYPE_MAP,
|
||||
WORK_ITEM_ALLOWED_CHANGE_TYPE_MAP,
|
||||
WORK_ITEM_TYPE_ENUM_OBJECTIVE,
|
||||
WORK_ITEM_TYPE_ENUM_KEY_RESULT,
|
||||
ALLOWED_CONVERSION_TYPES,
|
||||
WORK_ITEM_TYPE_ENUM_EPIC,
|
||||
WORK_ITEM_TYPE_VALUE_EPIC,
|
||||
sprintfWorkItem,
|
||||
|
|
@ -37,8 +33,6 @@ export default {
|
|||
actionCancel: {
|
||||
text: __('Cancel'),
|
||||
},
|
||||
mixins: [glFeatureFlagMixin()],
|
||||
inject: ['hasOkrsFeature'],
|
||||
props: {
|
||||
workItemId: {
|
||||
type: String,
|
||||
|
|
@ -96,7 +90,7 @@ export default {
|
|||
getEpicWidgetDefinitions: {
|
||||
type: Function,
|
||||
required: false,
|
||||
default: () => () => {},
|
||||
default: () => {},
|
||||
},
|
||||
},
|
||||
data() {
|
||||
|
|
@ -106,7 +100,6 @@ export default {
|
|||
warningMessage: '',
|
||||
changeTypeDisabled: true,
|
||||
hasDesigns: false,
|
||||
workItemFullPath: this.fullPath,
|
||||
typeFieldNote: '',
|
||||
};
|
||||
},
|
||||
|
|
@ -115,20 +108,15 @@ export default {
|
|||
query: namespaceWorkItemTypesQuery,
|
||||
variables() {
|
||||
return {
|
||||
fullPath: this.workItemFullPath,
|
||||
fullPath: this.fullPath,
|
||||
};
|
||||
},
|
||||
update(data) {
|
||||
return data.workspace?.workItemTypes?.nodes;
|
||||
return data.workspace?.workItemTypes?.nodes || [];
|
||||
},
|
||||
error(e) {
|
||||
this.throwError(e);
|
||||
},
|
||||
result() {
|
||||
if (this.selectedWorkItemType !== null) {
|
||||
this.validateWorkItemType();
|
||||
}
|
||||
},
|
||||
},
|
||||
hasDesigns: {
|
||||
query: getWorkItemDesignListQuery,
|
||||
|
|
@ -141,49 +129,38 @@ export default {
|
|||
update(data) {
|
||||
return findDesignWidget(data.workItem.widgets)?.designCollection?.designs.nodes?.length > 0;
|
||||
},
|
||||
skip() {
|
||||
return !this.workItemId;
|
||||
},
|
||||
error(e) {
|
||||
this.throwError(e);
|
||||
},
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
allowedConversionWorkItemTypes() {
|
||||
// The logic will be simplified once we implement
|
||||
// https://gitlab.com/gitlab-org/gitlab/-/issues/498656
|
||||
let allowedWorkItemTypes = [
|
||||
{ text: __('Select type'), value: null },
|
||||
...Object.entries(WORK_ITEMS_TYPE_MAP)
|
||||
.map(([key, value]) => ({
|
||||
text: value.value,
|
||||
value: key,
|
||||
}))
|
||||
.filter((item) => {
|
||||
if (item.text === this.workItemType) {
|
||||
return false;
|
||||
}
|
||||
// Keeping this separate for readability
|
||||
if (
|
||||
item.value === WORK_ITEM_TYPE_ENUM_OBJECTIVE ||
|
||||
item.value === WORK_ITEM_TYPE_ENUM_KEY_RESULT
|
||||
) {
|
||||
return this.isOkrsEnabled;
|
||||
}
|
||||
return WORK_ITEM_ALLOWED_CHANGE_TYPE_MAP.includes(item.value);
|
||||
}),
|
||||
];
|
||||
// Adding hardcoded EPIC till we have epic conversion support
|
||||
// https://gitlab.com/gitlab-org/gitlab/-/issues/478486
|
||||
if (this.allowedWorkItemTypesEE.length > 0) {
|
||||
allowedWorkItemTypes = [...allowedWorkItemTypes, ...this.allowedWorkItemTypesEE];
|
||||
}
|
||||
|
||||
return allowedWorkItemTypes;
|
||||
supportedConversionTypes() {
|
||||
return (
|
||||
this.workItemTypes
|
||||
?.find((type) => type.name === this.workItemType)
|
||||
?.supportedConversionTypes?.filter((item) => {
|
||||
// API is returning Incident, Requirement, Test Case, and Ticket in addition to required work items
|
||||
// As these types are not migrated, they are filtered out on the frontend
|
||||
// They will be added to the list as they are migrated
|
||||
// Discussion: https://gitlab.com/gitlab-org/gitlab/-/issues/498656#note_2263177119
|
||||
return ALLOWED_CONVERSION_TYPES.includes(item.name);
|
||||
})
|
||||
?.map((item) => ({
|
||||
text: item.name,
|
||||
value: item.id,
|
||||
})) || []
|
||||
);
|
||||
},
|
||||
isOkrsEnabled() {
|
||||
return this.hasOkrsFeature && this.glFeatures.okrsMvc;
|
||||
allowedConversionWorkItemTypes() {
|
||||
return [
|
||||
{
|
||||
text: __('Select type'),
|
||||
value: null,
|
||||
},
|
||||
...this.supportedConversionTypes,
|
||||
...this.allowedWorkItemTypesEE,
|
||||
];
|
||||
},
|
||||
selectedWorkItemTypeWidgetDefinitions() {
|
||||
return this.selectedWorkItemType?.value === WORK_ITEM_TYPE_ENUM_EPIC
|
||||
|
|
@ -313,7 +290,7 @@ export default {
|
|||
}
|
||||
return this.workItemTypes.find((widget) => widget.name === type)?.widgetDefinitions;
|
||||
},
|
||||
updateWorkItemFullPath(value) {
|
||||
updateWorkItemType(value) {
|
||||
this.typeFieldNote = '';
|
||||
|
||||
if (!value) {
|
||||
|
|
@ -326,8 +303,6 @@ export default {
|
|||
);
|
||||
|
||||
if (value === WORK_ITEM_TYPE_ENUM_EPIC) {
|
||||
// triggers the `workItemTypes` to fetch Epic widget definitions
|
||||
this.workItemFullPath = this.fullPath.substring(0, this.fullPath.lastIndexOf('/'));
|
||||
this.typeFieldNote = this.epicFieldNote;
|
||||
}
|
||||
this.validateWorkItemType();
|
||||
|
|
@ -378,7 +353,6 @@ export default {
|
|||
},
|
||||
show() {
|
||||
this.resetModal();
|
||||
this.changeTypeDisabled = true;
|
||||
this.$refs.modal.show();
|
||||
},
|
||||
hide() {
|
||||
|
|
@ -387,9 +361,8 @@ export default {
|
|||
},
|
||||
resetModal() {
|
||||
this.warningMessage = '';
|
||||
this.showDifferenceMessage = false;
|
||||
this.selectedWorkItemType = null;
|
||||
this.changeTypeDisabled = false;
|
||||
this.changeTypeDisabled = true;
|
||||
this.typeFieldNote = '';
|
||||
},
|
||||
},
|
||||
|
|
@ -419,7 +392,7 @@ export default {
|
|||
:value="selectedWorkItemTypeValue"
|
||||
width="md"
|
||||
:options="allowedConversionWorkItemTypes"
|
||||
@change="updateWorkItemFullPath"
|
||||
@change="updateWorkItemType"
|
||||
/>
|
||||
<p v-if="typeFieldNote" class="gl-text-subtle">{{ typeFieldNote }}</p>
|
||||
</gl-form-group>
|
||||
|
|
|
|||
|
|
@ -410,11 +410,11 @@ export const BASE_ALLOWED_CREATE_TYPES = [
|
|||
WORK_ITEM_TYPE_VALUE_TASK,
|
||||
];
|
||||
|
||||
export const WORK_ITEM_ALLOWED_CHANGE_TYPE_MAP = [
|
||||
WORK_ITEM_TYPE_ENUM_KEY_RESULT,
|
||||
WORK_ITEM_TYPE_ENUM_OBJECTIVE,
|
||||
WORK_ITEM_TYPE_ENUM_TASK,
|
||||
WORK_ITEM_TYPE_ENUM_ISSUE,
|
||||
export const ALLOWED_CONVERSION_TYPES = [
|
||||
WORK_ITEM_TYPE_VALUE_KEY_RESULT,
|
||||
WORK_ITEM_TYPE_VALUE_OBJECTIVE,
|
||||
WORK_ITEM_TYPE_VALUE_TASK,
|
||||
WORK_ITEM_TYPE_VALUE_ISSUE,
|
||||
];
|
||||
|
||||
export const WORK_ITEM_TYPE_NAME_MAP = {
|
||||
|
|
|
|||
|
|
@ -2,6 +2,10 @@ fragment WorkItemTypeFragment on WorkItemType {
|
|||
id
|
||||
name
|
||||
iconName
|
||||
supportedConversionTypes {
|
||||
id
|
||||
name
|
||||
}
|
||||
widgetDefinitions {
|
||||
type
|
||||
... on WorkItemWidgetDefinitionHierarchy {
|
||||
|
|
|
|||
|
|
@ -4,7 +4,9 @@ class Dashboard::TodosController < Dashboard::ApplicationController
|
|||
feature_category :notifications
|
||||
urgency :low
|
||||
|
||||
def index; end
|
||||
def index
|
||||
push_frontend_feature_flag(:todos_bulk_actions, current_user)
|
||||
end
|
||||
|
||||
def destroy
|
||||
todo = current_user.todos.find(params[:id])
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@ module Ci
|
|||
end
|
||||
|
||||
def all_runners
|
||||
raise Gitlab::Access::AccessDeniedError unless @current_user&.can_admin_all_resources?
|
||||
raise Gitlab::Access::AccessDeniedError unless can?(@current_user, :read_admin_cicd)
|
||||
|
||||
Ci::Runner.all
|
||||
end
|
||||
|
|
|
|||
|
|
@ -4,17 +4,21 @@ module Packages
|
|||
module Conan
|
||||
class PackageFinder
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
include ActiveRecord::Sanitization::ClassMethods
|
||||
|
||||
MAX_PACKAGES_COUNT = 500
|
||||
WILDCARD = '*'
|
||||
SQL_WILDCARD = '%'
|
||||
|
||||
def initialize(current_user, params, project: nil)
|
||||
@current_user = current_user
|
||||
@name, @version, @username, _ = params[:query].to_s.split(%r{[@/]})
|
||||
@name, @version, @username, _ = params[:query].to_s.split(%r{[@/]}).map { |q| sanitize_sql(q) }
|
||||
@project = project
|
||||
end
|
||||
|
||||
def execute
|
||||
return ::Packages::Conan::Package.none unless name.present?
|
||||
return [] if name == SQL_WILDCARD && version == SQL_WILDCARD
|
||||
|
||||
packages
|
||||
end
|
||||
|
|
@ -23,13 +27,22 @@ module Packages
|
|||
|
||||
attr_reader :current_user, :name, :project, :version, :username
|
||||
|
||||
def sanitize_sql(query)
|
||||
sanitize_sql_like(query).tr(WILDCARD, SQL_WILDCARD) unless query.nil?
|
||||
end
|
||||
|
||||
def packages
|
||||
matching_packages = base
|
||||
.installable
|
||||
.preload_conan_metadatum
|
||||
.with_name_like(name)
|
||||
matching_packages = matching_packages.with_version(version) if version.present?
|
||||
matching_packages.limit_recent(MAX_PACKAGES_COUNT)
|
||||
packages = base.installable.preload_conan_metadatum.with_name_like(name)
|
||||
packages = by_version(packages) if version.present?
|
||||
packages.limit_recent(MAX_PACKAGES_COUNT)
|
||||
end
|
||||
|
||||
def by_version(packages)
|
||||
if version.include?(SQL_WILDCARD)
|
||||
packages.with_version_like(version)
|
||||
else
|
||||
packages.with_version(version)
|
||||
end
|
||||
end
|
||||
|
||||
def base
|
||||
|
|
|
|||
|
|
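The `Packages::Conan::PackageFinder` change above splits the search query into name/version/username fragments and runs each one through `sanitize_sql_like` before translating the public `*` wildcard into SQL's `%`. A small, self-contained Ruby sketch of that transformation (the `sanitize_like` helper below only approximates ActiveRecord's `sanitize_sql_like`, and the sample inputs are made up):

```ruby
# Standalone sketch of the wildcard handling added to the Conan package finder.
WILDCARD = '*'
SQL_WILDCARD = '%'

def sanitize_like(fragment)
  # Escape LIKE metacharacters the way ActiveRecord's sanitize_sql_like does,
  # so a literal '%' or '_' in a package name cannot widen the match.
  fragment.gsub(/[\\%_]/) { |match| "\\#{match}" }
end

def to_like_pattern(part)
  # nil stays nil (missing query segment); otherwise escape first, then expose
  # '*' as the only user-facing wildcard by translating it to SQL's '%'.
  sanitize_like(part).tr(WILDCARD, SQL_WILDCARD) unless part.nil?
end

name, version = 'my-pkg*/1.2.*'.split(%r{[@/]}).map { |q| to_like_pattern(q) }
puts name                          # => my-pkg%
puts version                       # => 1.2.%
puts to_like_pattern('100%_done')  # => 100\%\_done (literal characters stay literal)
```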
@ -192,6 +192,8 @@ module Types
|
|||
|
||||
field :source, GraphQL::Types::String, null: true, description: "Source of the pipeline."
|
||||
|
||||
field :type, GraphQL::Types::String, null: false, description: "Type of the pipeline."
|
||||
|
||||
field :child, GraphQL::Types::Boolean, null: false, method: :child?, description: "If the pipeline is a child or not."
|
||||
|
||||
field :latest, GraphQL::Types::Boolean, null: false, method: :latest?, calls_gitaly: true, description: "If the pipeline is the latest one or not."
|
||||
|
|
|
|||
|
|
@ -108,11 +108,11 @@ module Types
|
|||
end
|
||||
|
||||
def admin_url
|
||||
Gitlab::Routing.url_helpers.admin_runner_url(runner) if can_admin_runners?
|
||||
Gitlab::Routing.url_helpers.admin_runner_url(runner) if can_read_all_runners?
|
||||
end
|
||||
|
||||
def edit_admin_url
|
||||
Gitlab::Routing.url_helpers.edit_admin_runner_url(runner) if can_admin_runners?
|
||||
Gitlab::Routing.url_helpers.edit_admin_runner_url(runner) if can_admin_all_runners?
|
||||
end
|
||||
|
||||
def ephemeral_register_url
|
||||
|
|
@ -129,7 +129,7 @@ module Types
|
|||
end
|
||||
|
||||
def register_admin_url
|
||||
return unless can_admin_runners? && runner.registration_available?
|
||||
return unless can_admin_all_runners? && runner.registration_available?
|
||||
|
||||
Gitlab::Routing.url_helpers.register_admin_runner_url(runner)
|
||||
end
|
||||
|
|
@ -163,9 +163,13 @@ module Types
|
|||
|
||||
private
|
||||
|
||||
def can_admin_runners?
|
||||
def can_admin_all_runners?
|
||||
context[:current_user]&.can_admin_all_resources?
|
||||
end
|
||||
|
||||
def can_read_all_runners?
|
||||
context[:current_user]&.can?(:read_admin_cicd)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -59,18 +59,26 @@ module Ci
|
|||
end
|
||||
end
|
||||
|
||||
def admin_runners_data_attributes
|
||||
{
|
||||
def admin_runners_app_data
|
||||
data = {
|
||||
# Runner install help page is external, located at
|
||||
# https://gitlab.com/gitlab-org/gitlab-runner
|
||||
runner_install_help_page: 'https://docs.gitlab.com/runner/install/',
|
||||
new_runner_path: new_admin_runner_path,
|
||||
allow_registration_token: Gitlab::CurrentSettings.allow_runner_registration_token.to_s,
|
||||
registration_token: Gitlab::CurrentSettings.runners_registration_token,
|
||||
registration_token: nil,
|
||||
online_contact_timeout_secs: ::Ci::Runner::ONLINE_CONTACT_TIMEOUT.to_i,
|
||||
stale_timeout_secs: ::Ci::Runner::STALE_TIMEOUT.to_i,
|
||||
tag_suggestions_path: tag_list_admin_runners_path(format: :json)
|
||||
tag_suggestions_path: tag_list_admin_runners_path(format: :json),
|
||||
can_admin_runners: false.to_s
|
||||
}
|
||||
|
||||
return data unless current_user.can_admin_all_resources?
|
||||
|
||||
data.merge({
|
||||
registration_token: Gitlab::CurrentSettings.runners_registration_token,
|
||||
can_admin_runners: true.to_s
|
||||
})
|
||||
end
|
||||
|
||||
def group_shared_runners_settings_data(group)
|
||||
|
|
|
|||
|
|
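The renamed `admin_runners_app_data` helper above builds a non-privileged payload first and merges in the registration token and `can_admin_runners: 'true'` only when the current user can administer all resources. A stripped-down sketch of that shape, with placeholder keys and a fake token value rather than the real helper:

```ruby
# Sketch of the "safe defaults first, privileged keys merged in" pattern.
def runners_app_data(can_admin_all_resources:)
  data = {
    new_runner_path: '/admin/runners/new',
    registration_token: nil,      # never exposed to non-admin users
    can_admin_runners: 'false'    # serialized as a string for data-* attributes
  }

  return data unless can_admin_all_resources

  data.merge(registration_token: 'glrt-EXAMPLE', can_admin_runners: 'true')
end

p runners_app_data(can_admin_all_resources: false)
p runners_app_data(can_admin_all_resources: true)
```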
@ -481,7 +481,7 @@ module Ci
|
|||
|
||||
{}.tap do |result|
|
||||
result[:publish] = ExpandVariables.expand(options[:publish].to_s, -> {
|
||||
pages_base_variables.sort_and_expand_all
|
||||
base_variables.sort_and_expand_all
|
||||
})
|
||||
end
|
||||
end
|
||||
|
|
@ -566,13 +566,8 @@ module Ci
|
|||
def variables
|
||||
strong_memoize(:variables) do
|
||||
Gitlab::Ci::Variables::Collection.new
|
||||
.concat(persisted_variables)
|
||||
.concat(dependency_proxy_variables)
|
||||
.concat(job_jwt_variables)
|
||||
.concat(scoped_variables)
|
||||
.concat(base_variables)
|
||||
.concat(pages_variables)
|
||||
.concat(job_variables)
|
||||
.concat(persisted_environment_variables)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -654,26 +649,24 @@ module Ci
|
|||
|
||||
def pages_variables
|
||||
::Gitlab::Ci::Variables::Collection.new.tap do |variables|
|
||||
next variables unless pages_generator? && Feature.enabled?(:fix_pages_ci_variables, project)
|
||||
|
||||
pages_url_builder = ::Gitlab::Pages::UrlBuilder.new(project, pages)
|
||||
next variables unless Feature.enabled?(:fix_pages_ci_variables, project)
|
||||
|
||||
variables
|
||||
.append(key: 'CI_PAGES_HOSTNAME', value: pages_url_builder.hostname)
|
||||
.append(key: 'CI_PAGES_URL', value: pages_url_builder.pages_url)
|
||||
.append(key: 'CI_PAGES_HOSTNAME', value: project.pages_hostname)
|
||||
.append(key: 'CI_PAGES_URL', value: project.pages_url(pages))
|
||||
end
|
||||
end
|
||||
|
||||
# This method can be used for expanding extra variables in both CE and EE `build.pages`.
|
||||
# It includes all variables that can be used as a value in pages_options.
|
||||
def pages_base_variables
|
||||
def base_variables
|
||||
::Gitlab::Ci::Variables::Collection.new
|
||||
.concat(persisted_variables)
|
||||
.concat(dependency_proxy_variables)
|
||||
.concat(job_jwt_variables)
|
||||
.concat(scoped_variables)
|
||||
.concat(job_variables)
|
||||
.concat(persisted_environment_variables)
|
||||
end
|
||||
strong_memoize_attr :pages_base_variables
|
||||
strong_memoize_attr :base_variables
|
||||
|
||||
def features
|
||||
{
|
||||
|
|
|
|||
|
|
@ -1278,6 +1278,24 @@ module Ci
|
|||
merge_request? && target_sha.present?
|
||||
end
|
||||
|
||||
def tag_pipeline?
|
||||
tag?
|
||||
end
|
||||
|
||||
def type
|
||||
if merge_train_pipeline?
|
||||
'merge_train'
|
||||
elsif merged_result_pipeline?
|
||||
'merged_result'
|
||||
elsif merge_request?
|
||||
'merge_request'
|
||||
elsif tag_pipeline?
|
||||
'tag'
|
||||
else
|
||||
'branch'
|
||||
end
|
||||
end
|
||||
|
||||
def merge_request_ref?
|
||||
MergeRequest.merge_request_ref?(ref)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -81,6 +81,7 @@ class Packages::Package < ApplicationRecord
|
|||
|
||||
scope :search_by_name, ->(query) { fuzzy_search(query, [:name], use_minimum_char_limit: false) }
|
||||
scope :with_version, ->(version) { where(version: version) }
|
||||
scope :with_version_like, ->(version) { where(arel_table[:version].matches(version)) }
|
||||
scope :without_version_like, ->(version) { where.not(arel_table[:version].matches(version)) }
|
||||
scope :with_package_type, ->(package_type) { where(package_type: package_type) }
|
||||
scope :without_package_type, ->(package_type) { where.not(package_type: package_type) }
|
||||
|
|
|
|||
|
|
@ -3408,7 +3408,11 @@ class Project < ApplicationRecord
|
|||
end
|
||||
|
||||
def pages_url(options = nil)
|
||||
Gitlab::Pages::UrlBuilder.new(self, options).pages_url
|
||||
pages_url_builder(options).pages_url
|
||||
end
|
||||
|
||||
def pages_hostname(options = nil)
|
||||
pages_url_builder(options).hostname
|
||||
end
|
||||
|
||||
def uploads_sharding_key
|
||||
|
|
@ -3417,6 +3421,12 @@ class Project < ApplicationRecord
|
|||
|
||||
private
|
||||
|
||||
def pages_url_builder(options = nil)
|
||||
strong_memoize_with(:pages_url_builder, options) do
|
||||
Gitlab::Pages::UrlBuilder.new(self, options)
|
||||
end
|
||||
end
|
||||
|
||||
def with_redis(&block)
|
||||
Gitlab::Redis::Cache.with(&block)
|
||||
end
|
||||
|
|
|
|||
|
|
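The `Project#pages_url_builder` helper above memoizes one `Gitlab::Pages::UrlBuilder` per distinct `options` value (via `strong_memoize_with`), so repeated `pages_url`/`pages_hostname` calls reuse the same builder. A self-contained sketch of that per-argument memoization; the `UrlBuilder` stand-in and URLs below are illustrative only:

```ruby
class PagesProject
  # Stand-in for Gitlab::Pages::UrlBuilder; the real class derives Pages URLs.
  UrlBuilder = Struct.new(:options) do
    def pages_url
      "https://group.example.io#{options && options[:path_prefix]}"
    end
  end

  def pages_url(options = nil)
    url_builder(options).pages_url
  end

  private

  # One cached builder per distinct `options` value, so repeated calls with the
  # same arguments reuse the same object instead of rebuilding it.
  def url_builder(options = nil)
    @url_builders ||= {}
    @url_builders[options] ||= UrlBuilder.new(options)
  end
end

project = PagesProject.new
puts project.pages_url                        # builds and caches the nil-options builder
puts project.pages_url                        # cache hit: same builder reused
puts project.pages_url(path_prefix: '/docs')  # different options, separate builder
```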
@ -35,6 +35,7 @@ class Ci::PipelineEntity < Grape::Entity
|
|||
expose :detached_merge_request_pipeline?, as: :detached_merge_request_pipeline
|
||||
expose :merged_result_pipeline?, as: :merge_request_pipeline # deprecated, use merged_result_pipeline going forward
|
||||
expose :merged_result_pipeline?, as: :merged_result_pipeline
|
||||
expose :type, as: :type
|
||||
end
|
||||
|
||||
expose :details do
|
||||
|
|
|
|||
|
|
@ -3,12 +3,25 @@
|
|||
module Packages
|
||||
module Conan
|
||||
class SearchService < BaseService
|
||||
include ActiveRecord::Sanitization::ClassMethods
|
||||
|
||||
WILDCARD = '*'
|
||||
SLASH = '/'
|
||||
MAX_WILDCARD_COUNT = 5
|
||||
MAX_SEARCH_TERM_LENGTH = 200
|
||||
|
||||
ERRORS = {
|
||||
search_term_too_long: ServiceResponse.error(
|
||||
message: "Search term length must be less than #{MAX_SEARCH_TERM_LENGTH} characters.",
|
||||
reason: :invalid_parameter
|
||||
),
|
||||
too_many_wildcards: ServiceResponse.error(
|
||||
message: "Too many wildcards in search term. Maximum is #{MAX_WILDCARD_COUNT}.",
|
||||
reason: :invalid_parameter
|
||||
)
|
||||
}.freeze
|
||||
|
||||
def execute
|
||||
return ERRORS[:search_term_too_long] if search_term_too_long?
|
||||
return ERRORS[:too_many_wildcards] if too_many_wildcards?
|
||||
|
||||
ServiceResponse.success(payload: { results: search_results })
|
||||
end
|
||||
|
||||
|
|
@ -20,26 +33,28 @@ module Packages
|
|||
search_packages
|
||||
end
|
||||
|
||||
def wildcard_query?
|
||||
params[:query] == WILDCARD
|
||||
def query
|
||||
params[:query]
|
||||
end
|
||||
|
||||
def sanitized_query
|
||||
@sanitized_query ||= sanitize_sql_like(params[:query].delete(WILDCARD))
|
||||
def wildcard_query?
|
||||
query == WILDCARD
|
||||
end
|
||||
|
||||
def search_term_too_long?
|
||||
query.length > MAX_SEARCH_TERM_LENGTH
|
||||
end
|
||||
|
||||
def too_many_wildcards?
|
||||
query.count(WILDCARD) > MAX_WILDCARD_COUNT
|
||||
end
|
||||
|
||||
def search_packages
|
||||
::Packages::Conan::PackageFinder
|
||||
.new(current_user, { query: build_query }, project: project)
|
||||
.new(current_user, { query: query }, project: project)
|
||||
.execute
|
||||
.map(&:conan_recipe)
|
||||
end
|
||||
|
||||
def build_query
|
||||
return "#{sanitized_query}%" if params[:query].end_with?(WILDCARD) && !sanitized_query.end_with?(SLASH)
|
||||
|
||||
sanitized_query
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
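The reworked `Packages::Conan::SearchService` above now rejects a query before any database work when it is longer than 200 characters or contains more than five `*` wildcards, returning an error response instead of raising. A self-contained sketch of those guards, with a plain `Result` struct standing in for `ServiceResponse`:

```ruby
# Sketch of the request guards added to the Conan search service.
WILDCARD = '*'
MAX_WILDCARD_COUNT = 5
MAX_SEARCH_TERM_LENGTH = 200

Result = Struct.new(:success, :message, keyword_init: true)

def validate(query)
  if query.length > MAX_SEARCH_TERM_LENGTH
    return Result.new(success: false,
      message: "Search term length must be less than #{MAX_SEARCH_TERM_LENGTH} characters.")
  end

  if query.count(WILDCARD) > MAX_WILDCARD_COUNT
    return Result.new(success: false,
      message: "Too many wildcards in search term. Maximum is #{MAX_WILDCARD_COUNT}.")
  end

  Result.new(success: true)
end

p validate('my-pkg/*')     # valid: one wildcard, short term
p validate('*a*b*c*d*e*')  # rejected: six wildcards
p validate('x' * 201)      # rejected: longer than 200 characters
```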
@ -3,4 +3,4 @@
|
|||
|
||||
#div{ data: { event_tracking_load: 'true', event_tracking: 'view_admin_runners_pageload' } }
|
||||
|
||||
#js-admin-runners{ data: admin_runners_data_attributes }
|
||||
#js-admin-runners{ data: admin_runners_app_data }
|
||||
|
|
|
|||
|
|
@ -41,6 +41,7 @@ module Gitlab
|
|||
config.active_support.executor_around_test_case = nil # New default is true
|
||||
config.active_support.isolation_level = nil # New default is thread
|
||||
config.active_support.key_generator_hash_digest_class = nil # New default is OpenSSL::Digest::SHA256
|
||||
config.active_support.cache_format_version = nil
|
||||
|
||||
# Rails 6.1
|
||||
config.action_dispatch.cookies_same_site_protection = nil # New default is :lax
|
||||
|
|
@ -88,7 +89,6 @@ module Gitlab
|
|||
require_dependency Rails.root.join('lib/gitlab/exceptions_app')
|
||||
|
||||
unless ::Gitlab.next_rails?
|
||||
config.active_support.cache_format_version = nil
|
||||
config.active_support.disable_to_s_conversion = false # New default is true
|
||||
config.active_support.use_rfc4122_namespaced_uuids = true
|
||||
ActiveSupport.to_time_preserves_timezone = false
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,9 @@
---
name: todos_bulk_actions
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/16564
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/179403
rollout_issue_url:
milestone: '17.9'
group: group::personal productivity
type: wip
default_enabled: false
@ -19,7 +19,8 @@ deprecators.silenced = silenced
|
|||
|
||||
ignored_warnings = [
|
||||
/Your `secret_key_base` is configured in `Rails.application.secrets`, which is deprecated in favor of/,
|
||||
/Please pass the (coder|class) as a keyword argument/
|
||||
/Please pass the (coder|class) as a keyword argument/,
|
||||
/Support for `config.active_support.cache_format_version/
|
||||
]
|
||||
|
||||
if Rails.env.production?
|
||||
|
|
|
|||
|
|
@ -0,0 +1,33 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class CreateSiphonEvents < ClickHouse::Migration
|
||||
def up
|
||||
execute <<-SQL
|
||||
CREATE TABLE IF NOT EXISTS siphon_events
|
||||
(
|
||||
project_id Nullable(Int64),
|
||||
author_id Int64,
|
||||
created_at DateTime64(6, 'UTC'),
|
||||
updated_at DateTime64(6, 'UTC'),
|
||||
action Int8,
|
||||
target_type LowCardinality(String) DEFAULT '',
|
||||
group_id Nullable(Int64),
|
||||
fingerprint Nullable(String),
|
||||
id Int64,
|
||||
target_id Nullable(Int64),
|
||||
imported_from Int8 DEFAULT 0,
|
||||
personal_namespace_id Nullable(Int64),
|
||||
_siphon_replicated_at DateTime64(6, 'UTC') DEFAULT now(),
|
||||
_siphon_deleted Bool DEFAULT FALSE
|
||||
)
|
||||
ENGINE = ReplacingMergeTree(_siphon_replicated_at, _siphon_deleted)
|
||||
PRIMARY KEY id
|
||||
SQL
|
||||
end
|
||||
|
||||
def down
|
||||
execute <<-SQL
|
||||
DROP TABLE IF EXISTS siphon_events
|
||||
SQL
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class CreateSiphonMilestones < ClickHouse::Migration
|
||||
def up
|
||||
execute <<-SQL
|
||||
CREATE TABLE IF NOT EXISTS siphon_milestones
|
||||
(
|
||||
id Int64,
|
||||
title String,
|
||||
project_id Nullable(Int64),
|
||||
description Nullable(String),
|
||||
due_date Nullable(Date32),
|
||||
created_at Nullable(DateTime64(6, 'UTC')),
|
||||
updated_at Nullable(DateTime64(6, 'UTC')),
|
||||
state LowCardinality(String) DEFAULT '',
|
||||
iid Nullable(Int64),
|
||||
title_html Nullable(String),
|
||||
description_html Nullable(String),
|
||||
start_date Nullable(Date32),
|
||||
cached_markdown_version Nullable(Int64),
|
||||
group_id Nullable(Int64),
|
||||
lock_version Int64 DEFAULT 0,
|
||||
_siphon_replicated_at DateTime64(6, 'UTC') DEFAULT now(),
|
||||
_siphon_deleted Bool DEFAULT FALSE
|
||||
)
|
||||
ENGINE = ReplacingMergeTree(_siphon_replicated_at, _siphon_deleted)
|
||||
PRIMARY KEY id
|
||||
SQL
|
||||
end
|
||||
|
||||
def down
|
||||
execute <<-SQL
|
||||
DROP TABLE IF EXISTS siphon_milestones
|
||||
SQL
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddVulnerabilityFindingSignaturesProjectIdNotNullConstraint < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
def up
|
||||
add_not_null_constraint :vulnerability_finding_signatures, :project_id, validate: false
|
||||
end
|
||||
|
||||
def down
|
||||
remove_not_null_constraint :vulnerability_finding_signatures, :project_id
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class PrepareVulnerabilityFindingSignaturesProjectIdNotNullValidation < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
CONSTRAINT_NAME = :check_f4ab9ffc5a
|
||||
|
||||
def up
|
||||
prepare_async_check_constraint_validation :vulnerability_finding_signatures, name: CONSTRAINT_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
unprepare_async_check_constraint_validation :vulnerability_finding_signatures, name: CONSTRAINT_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class FixGroupScimIdentities < Gitlab::Database::Migration[2.2]
|
||||
restrict_gitlab_migration gitlab_schema: :gitlab_main
|
||||
|
||||
milestone '17.9'
|
||||
|
||||
BATCH_SIZE = 150
|
||||
|
||||
def up
|
||||
group_scim_identities = define_batchable_model('group_scim_identities')
|
||||
group_scim_identities.each_batch(of: BATCH_SIZE) do |relation|
|
||||
execute <<~SQL
|
||||
UPDATE group_scim_identities
|
||||
SET active = scim_identities.active
|
||||
FROM scim_identities
|
||||
WHERE group_scim_identities.temp_source_id = scim_identities.id
|
||||
AND group_scim_identities.active <> scim_identities.active
|
||||
AND group_scim_identities.id in (#{relation.dup.reselect(:id).to_sql})
|
||||
SQL
|
||||
end
|
||||
end
|
||||
|
||||
def down
|
||||
# no op
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddStatusCheckResponsesProjectIdNotNullConstraint < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
def up
|
||||
add_not_null_constraint :status_check_responses, :project_id, validate: false
|
||||
end
|
||||
|
||||
def down
|
||||
remove_not_null_constraint :status_check_responses, :project_id
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class PrepareStatusCheckResponsesProjectIdNotNullValidation < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
CONSTRAINT_NAME = :check_29114cce9c
|
||||
|
||||
def up
|
||||
prepare_async_check_constraint_validation :status_check_responses, name: CONSTRAINT_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
unprepare_async_check_constraint_validation :status_check_responses, name: CONSTRAINT_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddApprovalMergeRequestRulesProjectIdNotNullConstraint < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
def up
|
||||
add_not_null_constraint :approval_merge_request_rules, :project_id, validate: false
|
||||
end
|
||||
|
||||
def down
|
||||
remove_not_null_constraint :approval_merge_request_rules, :project_id
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class PrepareApprovalMergeRequestRulesProjectIdNotNullValidation < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
CONSTRAINT_NAME = :check_90caab37e0
|
||||
|
||||
def up
|
||||
prepare_async_check_constraint_validation :approval_merge_request_rules, name: CONSTRAINT_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
unprepare_async_check_constraint_validation :approval_merge_request_rules, name: CONSTRAINT_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddWorkItemParentLinksNamespaceIdNotNullConstraint < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
def up
|
||||
add_not_null_constraint :work_item_parent_links, :namespace_id, validate: false
|
||||
end
|
||||
|
||||
def down
|
||||
remove_not_null_constraint :work_item_parent_links, :namespace_id
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class PrepareWorkItemParentLinksNamespaceIdNotNullValidation < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.9'
|
||||
|
||||
CONSTRAINT_NAME = :check_e9c0111985
|
||||
|
||||
def up
|
||||
prepare_async_check_constraint_validation :work_item_parent_links, name: CONSTRAINT_NAME
|
||||
end
|
||||
|
||||
def down
|
||||
unprepare_async_check_constraint_validation :work_item_parent_links, name: CONSTRAINT_NAME
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
29c114849bdc940d759488cb3eaf6ca156a8e4b3d829ca7369159f97c4492153
|
||||
|
|
@ -0,0 +1 @@
|
|||
28cb3e896fc7a391738440a357cd8a22c42a34400acea139a7ca8ac827280434
|
||||
|
|
@ -0,0 +1 @@
|
|||
d3d053c46c0c394d1383da1762ba2425ace382633bc79f51e7b5916de7ba764b
|
||||
|
|
@ -0,0 +1 @@
|
|||
d236dba35450741c600bfcac6ef3f1d35e48fe7ae684310293d752bc30795717
|
||||
|
|
@ -0,0 +1 @@
|
|||
5e0909195d51310124d4fc574a54990e4c7d7b398c3db5f41c1983597c187ff5
|
||||
|
|
@ -0,0 +1 @@
|
|||
3f753e33d98db5d1b392e98c6a8f53aadb3a546525cdd1be7d08ad4d37a6685b
|
||||
|
|
@ -0,0 +1 @@
|
|||
743bf317dde899e0007671372e17d3e7ab018d68451341b923079baf0cd29a04
|
||||
|
|
@ -0,0 +1 @@
|
|||
38f97ea8b802cadc8a9229b28e873c3341b3fe541a2d9fba227cfacb59d86175
|
||||
|
|
@ -0,0 +1 @@
|
|||
5f17ac64ac5efe1e338631f6e028ccfed095aeb916ebabd39fa7b2d299fb62f9
|
||||
|
|
@ -26540,6 +26540,9 @@ ALTER TABLE ONLY chat_teams
|
|||
ALTER TABLE approval_project_rules_users
|
||||
ADD CONSTRAINT check_26058e3982 CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
ALTER TABLE status_check_responses
|
||||
ADD CONSTRAINT check_29114cce9c CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
ALTER TABLE workspaces
|
||||
ADD CONSTRAINT check_2a89035b04 CHECK ((personal_access_token_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
|
|
@ -26564,6 +26567,9 @@ ALTER TABLE p_ci_pipeline_variables
|
|||
ALTER TABLE terraform_state_versions
|
||||
ADD CONSTRAINT check_84142902f6 CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
ALTER TABLE approval_merge_request_rules
|
||||
ADD CONSTRAINT check_90caab37e0 CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
ALTER TABLE sbom_occurrences_vulnerabilities
|
||||
ADD CONSTRAINT check_a02e48df9c CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
|
|
@ -26588,9 +26594,15 @@ ALTER TABLE web_hook_logs
|
|||
ALTER TABLE vulnerability_finding_evidences
|
||||
ADD CONSTRAINT check_e8f37f70eb CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
ALTER TABLE work_item_parent_links
|
||||
ADD CONSTRAINT check_e9c0111985 CHECK ((namespace_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
ALTER TABLE project_relation_exports
|
||||
ADD CONSTRAINT check_f461e3537f CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
ALTER TABLE vulnerability_finding_signatures
|
||||
ADD CONSTRAINT check_f4ab9ffc5a CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
ALTER TABLE approval_merge_request_rule_sources
|
||||
ADD CONSTRAINT check_f82666a937 CHECK ((project_id IS NOT NULL)) NOT VALID;
|
||||
|
||||
|
|
|
|||
|
|
@@ -187,7 +187,6 @@ following in a console on the AI gateway container:

```shell
docker exec -it <ai-gateway-container> sh
echo $AIGW_AUTH__BYPASS_EXTERNAL # must be true
echo $AIGW_CUSTOM_MODELS__ENABLED # must be true
```
@@ -105,6 +105,7 @@ Example response:
## Revoke a token

> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/170421) in GitLab 17.7 [with a flag](../../administration/feature_flags.md) named `api_admin_token_revoke`. Disabled by default.
> - [Cluster agent tokens added](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/178211) in GitLab 17.9.

FLAG:
The availability of this feature is controlled by a feature flag.

@@ -118,6 +119,7 @@ Revokes a given token. This endpoint supports the following token types:
- [Group access tokens](../../security/tokens/index.md#group-access-tokens)
- [Deploy tokens](../../user/project/deploy_tokens/index.md)
- [Feed tokens](../../security/tokens/index.md#feed-token)
- [Cluster agent tokens](../../security/tokens/index.md#gitlab-cluster-agent-tokens)

```plaintext
DELETE /api/v4/admin/token
```
@@ -31870,6 +31870,7 @@ Returns [`UserMergeRequestInteraction`](#usermergerequestinteraction).
| <a id="pipelinetrigger"></a>`trigger` | [`Boolean!`](#boolean) | If the pipeline was created by a Trigger request. |
| <a id="pipelinetriggeredbypath"></a>`triggeredByPath` | [`String`](#string) | Path that triggered the pipeline. |
| <a id="pipelinetroubleshootjobwithai"></a>`troubleshootJobWithAi` | [`Boolean!`](#boolean) | If the user can troubleshoot jobs of a pipeline. |
| <a id="pipelinetype"></a>`type` | [`String!`](#string) | Type of the pipeline. |
| <a id="pipelineupdatedat"></a>`updatedAt` | [`Time!`](#time) | Timestamp of the pipeline's last activity. |
| <a id="pipelineupstream"></a>`upstream` | [`Pipeline`](#pipeline) | Pipeline that triggered the pipeline. |
| <a id="pipelineuser"></a>`user` | [`UserCore`](#usercore) | Pipeline user. |
@@ -25,7 +25,7 @@ The Docker image for the AI gateway is around 340 MB (compressed) for the `linux

Find the GitLab official Docker image at:

- AI Gateway Docker image on Container Registry:
  - [Stable](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/container_registry/3809284)
  - [Nightly](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/container_registry/8086262)
- AI Gateway Docker image on DockerHub:

@@ -63,16 +63,6 @@ To fix this, set the appropriate certificate bundle path in the Docker container

Replace `/path/to/ca-bundle.pem` with the actual path to your certificate bundle.

### Additional Configuration

If you encounter authentication issues during health checks, bypass the authentication temporarily by setting the following environment variable:

```shell
-e AIGW_AUTH__BYPASS_EXTERNAL=true
```

This can be helpful for troubleshooting, but you should disable this after fixing the issues.

## Install using the AI gateway Helm chart

Prerequisites:

@@ -258,7 +248,7 @@ resources:
- Dedicate nodes or instances exclusively to the AI Gateway to prevent resource competition with other services.

## Scaling Strategies

- Use Kubernetes HPA to scale pods based on real-time metrics like:
  - Average CPU utilization exceeding 50%.
  - Request latency consistently above 500ms.
|||
Binary image files added (16 KiB and 70 KiB); content not shown.
@@ -0,0 +1,180 @@
---
stage: Plan
group: Project Management
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---

# Tutorial: Set up an issue board for a team stand-up

DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, GitLab Self-Managed, GitLab Dedicated

<!-- vale gitlab_base.FutureTense = NO -->

This tutorial shows you how to create and configure an issue board that helps teams run effective stand-ups.
When you're done, you'll have a board that supports this workflow:

1. Before each stand-up:

   - Team members update the status of their assigned tasks.
   - Tasks flow through lists that show their current state: planning, in development, blocked, or complete.

1. During the stand-up:

   - Team members discuss in-progress work and share completion timelines.
   - Blocked tasks are identified and plans made to resolve them.
   - New tasks are added to the ready list.
   - Completed tasks are celebrated and moved to done.

The result: Your team leaves each stand-up aligned on sprint progress, with risks identified and plans to address them.

To set up an issue board for team stand-ups:

1. [Create a group](#create-a-group)
1. [Create a project](#create-a-project)
1. [Create labels](#create-labels)
1. [Create the team stand-up board](#create-the-team-stand-up-board)
1. [Configure your board lists](#configure-your-board-lists)
1. [Create issues for team tasks](#create-issues-for-team-tasks)

## Before you begin

- If you're using an existing group for this tutorial, make sure you have at least the Planner role for the group.
- If you're using an existing project for this tutorial, make sure you have at least the Planner role for the project.

## Create a group

Start by creating a group to manage one or more related projects.
Groups let you manage member access and share settings across projects.

To create a group:

1. On the left sidebar, at the top, select **Create new** (**{plus}**) and **New group**.
1. Select **Create group**.
1. Complete the fields:
   - In **Group name**, enter `Paperclip Software Factory`.
1. Select **Create group**.

You've created an empty group.
Next, create a project to store your issues and code.

## Create a project

Projects store your code repositories and issues you'll use for planning.
All development work happens in projects.

To create a blank project:

1. In your group, in the upper-right corner, select **New project**.
1. Select **Create blank project**.
1. Complete the fields:
   - In **Project name**, enter `Paperclip Assistant`.
1. Select **Create project**.

## Create labels

To track issue status during stand-ups, you need workflow labels.

Create these labels in your `Paperclip Software Factory` group rather than the project.
Group-level labels are available to all projects in the group, which helps establish consistent
workflows across teams.

To create a workflow label:

1. On the left sidebar, select **Search or go to** and find your **Paperclip Software Factory** group.
1. Select **Manage > Labels**.
1. Select **New label**.
1. In the **Title** field, enter the label name.
1. Optional. Select a background color or enter a hex color value.
1. Select **Create label**.

Repeat these steps to create all the workflow labels:

- `workflow::planning breakdown`
- `workflow::ready for development`
- `workflow::in development`
- `workflow::ready for review`
- `workflow::in review`
- `workflow::blocked`
- `workflow::verification`
- `workflow::complete`
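If you would rather script the label creation than click through the UI, the same workflow labels can be created with the group labels REST API (`POST /groups/:id/labels`). A minimal Ruby sketch, assuming a personal access token with the `api` scope in `GITLAB_TOKEN` and a placeholder group path; adjust the host, group, and color to your setup:

```ruby
# Create the workflow labels via the GitLab group labels API instead of the UI.
# GITLAB_URL, GROUP_ID, and the color are placeholder assumptions.
require 'net/http'
require 'json'
require 'uri'

GITLAB_URL = 'https://gitlab.example.com'
GROUP_ID   = 'paperclip-software-factory' # numeric ID or URL-encoded group path
TOKEN      = ENV.fetch('GITLAB_TOKEN')

labels = [
  'workflow::planning breakdown',
  'workflow::ready for development',
  'workflow::in development',
  'workflow::ready for review',
  'workflow::in review',
  'workflow::blocked',
  'workflow::verification',
  'workflow::complete'
]

labels.each do |name|
  uri = URI("#{GITLAB_URL}/api/v4/groups/#{URI.encode_www_form_component(GROUP_ID)}/labels")
  response = Net::HTTP.post(
    uri,
    { name: name, color: '#6699cc' }.to_json,
    'PRIVATE-TOKEN' => TOKEN,
    'Content-Type' => 'application/json'
  )
  puts "#{name}: #{response.code}" # 201 when the label is created
end
```
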
## Create the team stand-up board

Create your board in the group so you can manage issues from any project in the group.

To create a group issue board:

1. On the left sidebar, select **Search or go to** and find your **Paperclip Software Factory** group.
1. Select **Plan > Issue boards**.
1. In the upper-left corner, select the dropdown list with the current board name.
1. Select **Create new board**.
1. Complete the fields:
   - In **Title**, enter `Team stand-up`.
   - Select both **Show the Open list** and **Show the Closed list**.
1. Select **Create board**.

### Add workflow lists to your board

1. In the upper-right corner, select **Add list** (**{plus}**).
1. From **New list**, select **Label**.
1. From the **Value** dropdown list, select a workflow label.
1. Select **Add to board**.
1. Repeat steps 1-4 for each workflow label.

Your board now has lists for each workflow state, though they'll be empty until you add issues in later steps.



## Configure your board lists

You can customize your board by setting filters that apply to all lists.
For example, you can show issues only from the current iteration or with specific labels.

To configure your board:

1. On your team stand-up board, select **Configure board** (**{settings}**).
1. Complete any of these fields to filter issues:
   - **Milestone**: to show issues from a specific milestone.
   - **Assignee**: to show issues assigned to specific team members.
   - **Labels**: to show issues with specific labels.
   - **Weight**: to show issues with specific weight values.
   - **Iteration**: to show issues from the current iteration.
1. Select **Create board**.

Your board now shows only issues that match your filters.
For example, if you select a milestone, only issues assigned to that milestone appear in the board lists.



## Create issues for team tasks

You can create issues directly from your board during the team stand-up.

To create an issue:

1. On your team stand-up board, in the `workflow::ready for development` list, select **Create new issue** (**{plus}**).
1. Complete the fields:
   - In **Title**, enter `Redesign user profile page`
   - From the **Projects** dropdown list, select **Paperclip Software Factory / Paperclip Assistant**
1. Select **Create issue**.

The issue is created with the `workflow::ready for development` label because you created it in that list.

### Add metadata to the issue

1. On the issue card, select anywhere except the title.
1. On the right sidebar, in the field you want to update, select **Edit**.
1. Select your changes.
1. Select any area outside the field to save.

Congratulations!
You've set up a team stand-up board that helps track work and facilitate discussions.
Your team can now use this board to run effective stand-ups.

## Related topics

- [Plan and track work tutorials](../plan_and_track.md).
- [Stand-ups, retrospectives, and velocity](../scrum_events/standups_retrospectives_velocity.md)
- [Product Development Flow](https://handbook.gitlab.com/handbook/product-development-flow/): how we use workflow labels at GitLab.
@@ -20,5 +20,6 @@ issues, epics, and more.
| [Set up a project for idea management](idea_management/index.md) | Use an issue board and scoped labels to manage ideas in a team. | **{star}** |
| [Set up a project for issue triage](issue_triage/index.md) | Use labels to set up a project for issue triage. | **{star}** |
| [Set up issue boards for team hand-off](boards_for_teams/index.md) | Use issue boards and scoped labels to set up collaboration across many teams. | **{star}** |
| [Set up an issue board for team stand-up](boards_for_standups/index.md) | Use issue boards and workflow labels to facilitate team stand-ups. | **{star}** |
| <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Epics and issue boards](https://www.youtube.com/watch?v=eQUnHwbKEkY) | Find out how to use epics and issue boards for project management. | |
| <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Portfolio Planning - Portfolio Management](https://www.youtube.com/watch?v=d9scVJUIF4c) | Find out how to manage your portfolio with requirements, issues, epics, milestones, and time tracking. | |
@@ -295,3 +295,7 @@ If you're experiencing high volatility, you can explore the following:
that mirrors the workflow stages that a story progresses through in a sprint.
This report can help focus discussions in your retrospectives on the specific workflow stages
that take the longest during your sprint cycles.

## Related topics

- [Tutorial: Set up an issue board for a team stand-up](../boards_for_standups/index.md#tutorial-set-up-an-issue-board-for-a-team-stand-up)
@@ -25,9 +25,8 @@ where:
- Usernames and group names can contain only letters (`a-zA-Z`), digits (`0-9`), emoji, underscores (`_`), dots (`.`), parentheses (`()`), dashes (`-`), or spaces.
- Project names can contain only letters (`a-zA-Z`), digits (`0-9`), emoji, underscores (`_`), dots (`.`), pluses (`+`), dashes (`-`), or spaces.
- Usernames and project or group slugs:
  - Must start with a letter (`a-zA-Z`) or digit (`0-9`).
  - Must start and end with a letter (`a-zA-Z`) or digit (`0-9`).
  - Must not contain consecutive special characters.
  - Cannot start or end with a special character.
  - Cannot end in `.git` or `.atom`.
  - Can contain only letters (`a-zA-Z`), digits (`0-9`), underscores (`_`), dots (`.`), or dashes (`-`).
@ -78,6 +78,7 @@ module API
|
|||
detail 'This feature was introduced in GitLab 12.4'
|
||||
success code: 200
|
||||
failure [
|
||||
{ code: 400, message: 'Bad Request' },
|
||||
{ code: 404, message: 'Not Found' }
|
||||
]
|
||||
tags %w[conan_packages]
|
||||
|
|
@ -91,9 +92,10 @@ module API
|
|||
route_setting :authorization, skip_job_token_policies: true
|
||||
|
||||
get 'conans/search', urgency: :low do
|
||||
service = ::Packages::Conan::SearchService.new(search_project, current_user, query: params[:q]).execute
|
||||
response = ::Packages::Conan::SearchService.new(search_project, current_user, query: params[:q]).execute
|
||||
bad_request!(response.message) if response.error?
|
||||
|
||||
service.payload
|
||||
response.payload
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -10,6 +10,8 @@ module Authn
|
|||
attr_reader :revocable, :source
|
||||
|
||||
def initialize(plaintext, source)
|
||||
return unless self.class.prefix?(plaintext)
|
||||
|
||||
@revocable = ::Clusters::AgentToken.find_by_token(plaintext)
|
||||
@source = source
|
||||
end
|
||||
|
|
@ -18,10 +20,11 @@ module Authn
|
|||
::API::Entities::Clusters::AgentToken
|
||||
end
|
||||
|
||||
def revoke!(_current_user)
|
||||
def revoke!(current_user)
|
||||
raise ::Authn::AgnosticTokenIdentifier::NotFoundError, 'Not Found' if revocable.blank?
|
||||
|
||||
raise ::Authn::AgnosticTokenIdentifier::UnsupportedTokenError, 'Unsupported token type'
|
||||
service = ::Clusters::AgentTokens::RevokeService.new(token: revocable, current_user: current_user)
|
||||
service.execute
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,46 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Gitlab
|
||||
module BackgroundMigration
|
||||
# Sets the `namespace_id` of the existing `vulnerability_reads` records
|
||||
class BackfillNamespaceIdOfVulnerabilityReads < BatchedMigrationJob
|
||||
operation_name :set_namespace_id
|
||||
feature_category :database
|
||||
|
||||
UPDATE_SQL = <<~SQL
|
||||
UPDATE
|
||||
vulnerability_reads
|
||||
SET
|
||||
namespace_id = sub_query.namespace_id
|
||||
FROM
|
||||
(%<subquery>s) as sub_query
|
||||
WHERE
|
||||
vulnerability_reads.vulnerability_id = sub_query.vulnerability_id
|
||||
SQL
|
||||
|
||||
def perform
|
||||
# This allow_cross_joins_across_databases call will never be removed.
|
||||
# The definition of this migration cannot function after the decomposition
|
||||
# of the Sec database. As such, it must be finalised before the decomposition.
|
||||
Gitlab::Database.allow_cross_joins_across_databases(
|
||||
url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/164505'
|
||||
) do
|
||||
each_sub_batch do |sub_batch|
|
||||
update_query = update_query_for(sub_batch)
|
||||
|
||||
connection.execute(update_query)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def update_query_for(sub_batch)
|
||||
subquery = sub_batch.select("vulnerability_reads.vulnerability_id, projects.namespace_id")
|
||||
.joins("INNER JOIN projects ON projects.id = vulnerability_reads.project_id")
|
||||
|
||||
format(UPDATE_SQL, subquery: subquery.to_sql)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -279,7 +279,10 @@ module Gitlab

      return unless db_config

      db_config_name = db_config.name.delete_suffix(LoadBalancing::LoadBalancer::REPLICA_SUFFIX)
      primary_model = self.database_base_models.fetch(db_config_name.to_sym)

      primary_model = self.database_base_models[db_config_name.to_sym]

      return unless primary_model

      self.schemas_to_base_models.select do |_, child_models|
        child_models.any? do |child_model|
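The change above swaps `Hash#fetch`, which raises `KeyError` for an unknown database name, for `Hash#[]` plus an early return, so an unrecognized configuration now yields `nil` instead of an exception. A minimal sketch of that difference, using placeholder model names rather than GitLab's real base models:

```ruby
# Placeholder data; GitLab's real mapping holds ActiveRecord base classes.
DATABASE_BASE_MODELS = { main: 'MainModel', ci: 'CiModel' }.freeze

def schemas_for(db_config_name)
  primary_model = DATABASE_BASE_MODELS[db_config_name.to_sym] # nil for unknown names
  return unless primary_model

  "schemas derived from #{primary_model}"
end

schemas_for('ci')      # => "schemas derived from CiModel"
schemas_for('unknown') # => nil

begin
  DATABASE_BASE_MODELS.fetch(:unknown) # the old code path
rescue KeyError => e
  e.message # => "key not found: :unknown"
end
```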
@@ -3,6 +3,21 @@

module Gitlab
  module Patch
    module RedisCacheStore
      # The initialize method calls the retrieve_pool_options method:
      # https://github.com/rails/rails/blob/v7.1.5.1/activesupport/lib/active_support/cache/redis_cache_store.rb#L149
      # In Rails 7.1 the method changed and now it always returns something:
      #
      # - https://github.com/rails/rails/blob/v7.0.8.7/activesupport/lib/active_support/cache.rb#L183
      # - https://github.com/rails/rails/blob/v7.1.5.1/activesupport/lib/active_support/cache.rb#L206
      #
      # As a result, an unexpected connection pool is initialized.
      # This patch always initializes redis without a connection pool; the pool is initialized in a wrapper.
      def initialize(*args, **kwargs)
        super

        @redis = self.class.build_redis(redis: kwargs[:redis])
      end

      # We will try to keep patched code explicit and matching the original signature in
      # https://github.com/rails/rails/blob/v7.1.3.4/activesupport/lib/active_support/cache/redis_cache_store.rb#L324
      def read_multi_entries(names, **options)
@@ -10,6 +10,7 @@ module Gitlab

        yield multistore
      end
    end
    alias_method :then, :with

    def multistore_pool
      @multistore_pool ||= MultiStoreConnectionPool.new(size: pool_size, name: pool_name) { multistore }
@@ -24,7 +24,7 @@ module Gitlab

      end

      def oci_repository_path_regex_message
        "can only include non-accented letters, digits, '_', '-' and '.'. It must not start with '-', end in '.', '.git', or '.atom'."
        "can only include non-accented letters, digits, '_', '-' and '.'. It must not start with '-', '_', or '.', nor end with '-', '_', '.', '.git', or '.atom'."
      end

      def group_name_regex
@@ -4,7 +4,12 @@ module Gitlab

  module Sessions
    module CacheStoreCoder
      extend self
      include ActiveSupport::Cache::Coders::Rails61Coder

      if ::Gitlab.next_rails?
        include ActiveSupport::Cache::SerializerWithFallback[:marshal_6_1]
      else
        include ActiveSupport::Cache::Coders::Rails61Coder
      end

      def load(payload)
        unmarshalled = super
@@ -299,6 +299,11 @@ msgid_plural "%d findings must be resolved"

msgstr[0] ""
msgstr[1] ""

msgid "%d findings"
msgid_plural "%d findings"
msgstr[0] ""
msgstr[1] ""

msgid "%d fork"
msgid_plural "%d forks"
msgstr[0] ""

@@ -1021,6 +1026,9 @@ msgstr ""

msgid "%{labelStart}Project:%{labelEnd} %{project}"
msgstr ""

msgid "%{labelStart}Report Type:%{labelEnd} %{reportType}"
msgstr ""

msgid "%{labelStart}Scanner:%{labelEnd} %{scanner}"
msgstr ""

@@ -47736,6 +47744,9 @@ msgstr ""

msgid "Reports|New"
msgstr ""

msgid "Reports|Report Type"
msgstr ""

msgid "Reports|See test results while the pipeline is running"
msgstr ""
@@ -33,3 +33,9 @@ Done in [0-9]+ms.

# The next major version of the view_component gem will require Rails >= 7.1
Support for Rails versions < 7.1 is deprecated and will be removed from ViewComponent 4.0.0

# Migration to the new cache version is in progress
DEPRECATION WARNING: Support for `config.active_support.cache_format_version = 6.1` has been deprecated and will be removed in Rails 7.2.
Check the Rails upgrade guide at https://guides.rubyonrails.org/upgrading_ruby_on_rails.html#new-activesupport-cache-serialization-format
for more information on how to upgrade.
called from
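The silenced deprecation refers to the Rails cache serialization setting named in the warning itself. For orientation only, a minimal sketch of where that setting lives; the application class is a placeholder:

```ruby
# Sketch only: the application class name is a placeholder.
# config/application.rb
module ExampleApp
  class Application < Rails::Application
    # Keep the Rails 6.1 cache entry format until the migration to the
    # newer serialization format is complete (see the warning above).
    config.active_support.cache_format_version = 6.1
  end
end
```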
@@ -1,9 +0,0 @@

# frozen_string_literal: true

FactoryBot.define do
  factory :ci_platform_metric do
    recorded_at { Time.zone.now }
    platform_target { generate(:title) }
    count { SecureRandom.random_number(100) + 1 }
  end
end
@ -465,42 +465,54 @@ RSpec.describe 'Dashboard Todos', :js, feature_category: :notifications do
|
|||
end
|
||||
|
||||
describe '"Mark all as done" button' do
|
||||
context 'with no pending todos' do
|
||||
it 'does not show' do
|
||||
visit dashboard_todos_path
|
||||
expect(page).not_to have_content 'Mark all as done'
|
||||
end
|
||||
it 'does not show' do
|
||||
create_todo
|
||||
visit dashboard_todos_path
|
||||
expect(page).not_to have_content 'Mark all as done'
|
||||
end
|
||||
|
||||
context 'with pending todos' do
|
||||
let_it_be(:self_assigned) { create_todo(author: user, target: issue) }
|
||||
let_it_be(:self_marked) { create_todo(author: user, target: issue2, action: :marked) }
|
||||
let_it_be(:other_assigned) { create_todo(author: user2, target: issue3) }
|
||||
context 'with todos_bulk_actions feature disabled' do
|
||||
before do
|
||||
stub_feature_flags(todos_bulk_actions: false)
|
||||
end
|
||||
|
||||
context 'with no filters applied' do
|
||||
it 'marks all pending todos as done' do
|
||||
context 'with no pending todos' do
|
||||
it 'does not show' do
|
||||
visit dashboard_todos_path
|
||||
click_on 'Mark all as done'
|
||||
|
||||
expect(page).to have_content 'Not sure where to go next?'
|
||||
within('.gl-toast') do
|
||||
expect(page).to have_content 'Marked 3 to-dos as done'
|
||||
find('a.gl-toast-action', text: 'Undo').click
|
||||
end
|
||||
expect(page).to have_content 'Restored 3 to-dos'
|
||||
expect(page).to have_selector('ul[data-testid=todo-item-list-container] li', count: 3)
|
||||
expect(page).not_to have_content 'Mark all as done'
|
||||
end
|
||||
end
|
||||
|
||||
context 'with filters applied' do
|
||||
it 'only marks the filtered todos as done' do
|
||||
visit dashboard_todos_path(author_id: user.id)
|
||||
click_on 'Mark all as done'
|
||||
context 'with pending todos' do
|
||||
let_it_be(:self_assigned) { create_todo(author: user, target: issue) }
|
||||
let_it_be(:self_marked) { create_todo(author: user, target: issue2, action: :marked) }
|
||||
let_it_be(:other_assigned) { create_todo(author: user2, target: issue3) }
|
||||
|
||||
expect(page).to have_content 'Sorry, your filter produced no results'
|
||||
click_on 'Clear'
|
||||
expect(page).to have_selector('ul[data-testid=todo-item-list-container] li', count: 1)
|
||||
expect(page).to have_content(other_assigned.author.name)
|
||||
context 'with no filters applied' do
|
||||
it 'marks all pending todos as done' do
|
||||
visit dashboard_todos_path
|
||||
click_on 'Mark all as done'
|
||||
|
||||
expect(page).to have_content 'Not sure where to go next?'
|
||||
within('.gl-toast') do
|
||||
expect(page).to have_content 'Marked 3 to-dos as done'
|
||||
find('a.gl-toast-action', text: 'Undo').click
|
||||
end
|
||||
expect(page).to have_content 'Restored 3 to-dos'
|
||||
expect(page).to have_selector('ul[data-testid=todo-item-list-container] li', count: 3)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with filters applied' do
|
||||
it 'only marks the filtered todos as done' do
|
||||
visit dashboard_todos_path(author_id: user.id)
|
||||
click_on 'Mark all as done'
|
||||
|
||||
expect(page).to have_content 'Sorry, your filter produced no results'
|
||||
click_on 'Clear'
|
||||
expect(page).to have_selector('ul[data-testid=todo-item-list-container] li', count: 1)
|
||||
expect(page).to have_content(other_assigned.author.name)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -13,9 +13,32 @@ RSpec.describe ::Packages::Conan::PackageFinder, feature_category: :package_regi

  let_it_be(:errored_package) { create(:conan_package, :error, project: project) }
  let_it_be(:private_package) { create(:conan_package, project: private_project) }

  describe '#initialize', :aggregate_failures do
    let(:query) { 'a*p*a/1.*.3@name*ace1+pr*ct-1/stable' }
    let(:params) { { query: query } }

    subject { described_class.new(user, params) }

    it 'uses sql wildcards' do
      expect(subject.send(:name)).to eq('a%p%a')
      expect(subject.send(:version)).to eq('1.%.3')
      expect(subject.send(:username)).to eq('name%ace1+pr%ct-1')
    end

    context 'with query containing special characters' do
      let(:query) { '\ /\n\\ " ' }

      it 'escapes sql characters' do
        expect(subject.send(:name)).to eq('\\\\ ')
        expect(subject.send(:version)).to eq('\\\\n\\\\ " ')
        expect(subject.send(:username)).to be_nil
      end
    end
  end
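The examples above pin down the finder's wildcard handling: Conan's `*` becomes the SQL `%` wildcard, and characters significant to `LIKE` are escaped first. A standalone sketch of that translation is shown below; the helpers are stand-ins modeled on `ActiveRecord::Base.sanitize_sql_like`, not the finder's actual private methods.

```ruby
# Stand-in for ActiveRecord::Base.sanitize_sql_like: escape LIKE metacharacters.
def sanitize_like(value, escape_character = "\\")
  pattern = Regexp.union(escape_character, "%", "_")
  value.gsub(pattern) { |x| [escape_character, x].join }
end

# Translate a Conan-style wildcard query into a SQL LIKE pattern.
def to_like_pattern(term)
  sanitize_like(term).tr('*', '%')
end

to_like_pattern('a*p*a')   # => "a%p%a"
to_like_pattern('1.*.3')   # => "1.%.3"
to_like_pattern('50%_off') # => "50\\%\\_off"
```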
  describe '#execute' do
    context 'without package user name' do
      let(:query) { "#{conan_package.name.split('/').first[0, 3]}%" }
      let(:query) { "#{conan_package.name.split('/').first[0, 3]}*" }
      let(:finder) { described_class.new(user, params) }
      let(:params) { { query: query } }
@@ -76,6 +99,18 @@ RSpec.describe ::Packages::Conan::PackageFinder, feature_category: :package_regi

      end
    end

    context 'with partial version' do
      let_it_be(:conan_package3) do
        create(:conan_package, project: project, name: conan_package.name, version: '1.2.3')
      end

      let(:query) { "#{conan_package3.name}/1.*.3" }

      it 'matches the correct package' do
        expect(subject).to match_array([conan_package3])
      end
    end

    context 'with nil query' do
      let(:query) { nil }

@@ -114,7 +149,7 @@ RSpec.describe ::Packages::Conan::PackageFinder, feature_category: :package_regi

    end

    context 'with package user name' do
      let(:query) { "#{conan_package.name.split('/').first[0, 3]}%" }
      let(:query) { "#{conan_package.name.split('/').first[0, 3]}*" }
      let(:finder) { described_class.new(user, params) }
      let(:params) { { query: package.conan_recipe } }
|||
|
|
@ -92,6 +92,11 @@ describe('AdminRunnersApp', () => {
|
|||
let wrapper;
|
||||
const showToast = jest.fn();
|
||||
|
||||
const defaultProps = {
|
||||
newRunnerPath,
|
||||
canAdminRunners: true,
|
||||
};
|
||||
|
||||
const findRunnerStats = () => wrapper.findComponent(RunnerStats);
|
||||
const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
|
||||
const findRegistrationDropdown = () => wrapper.findComponent(RegistrationDropdown);
|
||||
|
|
@ -101,6 +106,7 @@ describe('AdminRunnersApp', () => {
|
|||
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
|
||||
const findRunnerPaginationNext = () => findRunnerPagination().findByText('Next');
|
||||
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
|
||||
const findNewInstanceRunnerButton = () => wrapper.findByText('New instance runner');
|
||||
|
||||
const createComponent = ({
|
||||
props = {},
|
||||
|
|
@ -121,7 +127,7 @@ describe('AdminRunnersApp', () => {
|
|||
wrapper = mountFn(AdminRunnersApp, {
|
||||
apolloProvider: createMockApollo(handlers, {}, cacheConfig),
|
||||
propsData: {
|
||||
newRunnerPath,
|
||||
...defaultProps,
|
||||
...props,
|
||||
},
|
||||
provide: {
|
||||
|
|
@ -158,18 +164,44 @@ describe('AdminRunnersApp', () => {
|
|||
showToast.mockReset();
|
||||
});
|
||||
|
||||
it('shows the runner registration token instructions', () => {
|
||||
createComponent({
|
||||
props: {
|
||||
describe('runner registration dropdown', () => {
|
||||
it('shows the runner registration token instructions', () => {
|
||||
createComponent({
|
||||
props: {
|
||||
allowRegistrationToken: true,
|
||||
registrationToken: mockRegistrationToken,
|
||||
},
|
||||
});
|
||||
|
||||
expect(findRegistrationDropdown().props()).toEqual({
|
||||
allowRegistrationToken: true,
|
||||
registrationToken: mockRegistrationToken,
|
||||
},
|
||||
type: INSTANCE_TYPE,
|
||||
});
|
||||
});
|
||||
|
||||
expect(findRegistrationDropdown().props()).toEqual({
|
||||
allowRegistrationToken: true,
|
||||
registrationToken: mockRegistrationToken,
|
||||
type: INSTANCE_TYPE,
|
||||
describe('when canAdminRunners prop is false', () => {
|
||||
it('is not shown', () => {
|
||||
createComponent({ props: { canAdminRunners: false } });
|
||||
|
||||
expect(findRegistrationDropdown().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('new instance runner button', () => {
|
||||
it('is shown', () => {
|
||||
createComponent();
|
||||
|
||||
expect(findNewInstanceRunnerButton().exists()).toBe(true);
|
||||
});
|
||||
|
||||
describe('when canAdminRunners prop is false', () => {
|
||||
it('is not shown', () => {
|
||||
createComponent({ props: { canAdminRunners: false } });
|
||||
|
||||
expect(findNewInstanceRunnerButton().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -465,12 +497,18 @@ describe('AdminRunnersApp', () => {
|
|||
|
||||
describe('Bulk delete', () => {
|
||||
describe('Before runners are deleted', () => {
|
||||
beforeEach(async () => {
|
||||
it('runner list is checkable', async () => {
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
|
||||
expect(findRunnerList().props('checkable')).toBe(true);
|
||||
});
|
||||
|
||||
it('runner list is checkable', () => {
|
||||
expect(findRunnerList().props('checkable')).toBe(true);
|
||||
describe('when canAdminRunners prop is false', () => {
|
||||
it('runner list is not checkable', async () => {
|
||||
await createComponent({ props: { canAdminRunners: false }, mountFn: mountExtended });
|
||||
|
||||
expect(findRunnerList().props('checkable')).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ RSpec.describe "Work items", '(JavaScript fixtures)', type: :request, feature_ca
|
|||
include JavaScriptFixturesHelpers
|
||||
|
||||
let_it_be(:group) { create(:group, :public) }
|
||||
let_it_be(:project) { create(:project, :public, namespace: group) }
|
||||
let_it_be(:user) { create(:user) }
|
||||
|
||||
let(:namespace_work_item_types_query_path) { 'work_items/graphql/namespace_work_item_types.query.graphql' }
|
||||
|
|
@ -15,7 +16,7 @@ RSpec.describe "Work items", '(JavaScript fixtures)', type: :request, feature_ca
|
|||
it 'graphql/work_items/namespace_work_item_types.query.graphql.json' do
|
||||
query = get_graphql_query_as_string(namespace_work_item_types_query_path)
|
||||
|
||||
post_graphql(query, current_user: user, variables: { fullPath: group.full_path })
|
||||
post_graphql(query, current_user: user, variables: { fullPath: project.full_path })
|
||||
|
||||
expect_graphql_errors_to_be_empty
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import { nextTick } from 'vue';
|
||||
import { GlSprintf } from '@gitlab/ui';
|
||||
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import App from '~/vue_merge_request_widget/components/widget/app.vue';
|
||||
|
|
@ -22,6 +24,7 @@ describe('MR Widget App', () => {
|
|||
...mr,
|
||||
},
|
||||
},
|
||||
stubs: { GlSprintf },
|
||||
});
|
||||
};
|
||||
|
||||
|
|
@ -78,5 +81,20 @@ describe('MR Widget App', () => {
|
|||
|
||||
expect(wrapper.findByTestId('reports-widgets-container').isVisible()).toBe(true);
|
||||
});
|
||||
|
||||
it('shows findings count after widget emits loaded event', async () => {
|
||||
createComponent({
|
||||
mr: { testResultsPath: 'path/to/testResultsPath' },
|
||||
provide: { glFeatures: { mrReportsTab: true } },
|
||||
});
|
||||
|
||||
await waitForPromises();
|
||||
|
||||
wrapper.findComponent(MrTestReportWidget).vm.$emit('loaded', 10);
|
||||
|
||||
await nextTick();
|
||||
|
||||
expect(wrapper.findComponent(StateContainer).text()).toContain('10 findings');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -41,6 +41,16 @@ describe('Accessibility widget', () => {
|
|||
mock.restore();
|
||||
});
|
||||
|
||||
it('emits loaded event', async () => {
|
||||
mockApi(HTTP_STATUS_OK, accessibilityReportResponseErrors);
|
||||
|
||||
createComponent();
|
||||
|
||||
await waitForPromises();
|
||||
|
||||
expect(wrapper.emitted('loaded')[0]).toContain(5);
|
||||
});
|
||||
|
||||
describe('summary', () => {
|
||||
it('displays loading text', () => {
|
||||
mockApi(HTTP_STATUS_OK, accessibilityReportResponseErrors);
|
||||
|
|
|
|||
|
|
@ -37,6 +37,10 @@ describe('vue_merge_request_widget/widgets/security_reports/mr_widget_security_r
|
|||
await waitForPromises();
|
||||
});
|
||||
|
||||
it('emits loaded event', () => {
|
||||
expect(wrapper.emitted('loaded')[0]).toContain(0);
|
||||
});
|
||||
|
||||
it('displays the correct message', () => {
|
||||
expect(wrapper.findByText('Security scans have run').exists()).toBe(true);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -47,6 +47,16 @@ describe('Terraform extension', () => {
|
|||
mock.restore();
|
||||
});
|
||||
|
||||
it('emits loaded event', async () => {
|
||||
mockPollingApi(HTTP_STATUS_OK, plans, {});
|
||||
|
||||
createComponent();
|
||||
|
||||
await waitForPromises();
|
||||
|
||||
expect(wrapper.emitted('loaded')[0]).toContain(2);
|
||||
});
|
||||
|
||||
describe('summary', () => {
|
||||
describe('while loading', () => {
|
||||
const loadingText = 'Loading Terraform reports...';
|
||||
|
|
|
|||
|
|
@ -84,6 +84,15 @@ describe('Test report extension', () => {
|
|||
mock.restore();
|
||||
});
|
||||
|
||||
it('emits loaded event', async () => {
|
||||
mockApi(HTTP_STATUS_OK, newFailedTestReports);
|
||||
createComponent();
|
||||
|
||||
await waitForPromises();
|
||||
|
||||
expect(wrapper.emitted('loaded')[0]).toContain(2);
|
||||
});
|
||||
|
||||
describe('summary', () => {
|
||||
describe('loading state', () => {
|
||||
it('displays loading state initially', () => {
|
||||
|
|
|
|||
|
|
@ -814,4 +814,17 @@ describe('IssuableItem', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('redirects to incident webUrl on row click when issuable item is not a work item', async () => {
|
||||
wrapper = createComponent({
|
||||
preventRedirect: true,
|
||||
showCheckbox: false,
|
||||
issuable: { ...mockIssuable, type: 'INCIDENT', namespace: { fullPath: 'gitlab-org/gitlab' } },
|
||||
});
|
||||
|
||||
await findIssuableItemWrapper().trigger('click');
|
||||
|
||||
expect(wrapper.emitted('select-issuable')).not.toBeDefined();
|
||||
expect(visitUrl).toHaveBeenCalledWith(mockIssuable.webUrl);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -12,13 +12,7 @@ import WorkItemChangeTypeModal from '~/work_items/components/work_item_change_ty
|
|||
import namespaceWorkItemTypesQuery from '~/work_items/graphql/namespace_work_item_types.query.graphql';
|
||||
import convertWorkItemMutation from '~/work_items/graphql/work_item_convert.mutation.graphql';
|
||||
import getWorkItemDesignListQuery from '~/work_items/components/design_management/graphql/design_collection.query.graphql';
|
||||
import {
|
||||
WORK_ITEM_TYPE_ENUM_KEY_RESULT,
|
||||
WORK_ITEM_TYPE_VALUE_KEY_RESULT,
|
||||
WORK_ITEM_TYPE_VALUE_TASK,
|
||||
WORK_ITEM_TYPE_VALUE_ISSUE,
|
||||
WORK_ITEM_WIDGETS_NAME_MAP,
|
||||
} from '~/work_items/constants';
|
||||
import { WORK_ITEM_TYPE_VALUE_TASK, WORK_ITEM_TYPE_VALUE_ISSUE } from '~/work_items/constants';
|
||||
|
||||
import {
|
||||
convertWorkItemMutationResponse,
|
||||
|
|
@ -33,10 +27,20 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
let wrapper;
|
||||
|
||||
const typesQuerySuccessHandler = jest.fn().mockResolvedValue(namespaceWorkItemTypesQueryResponse);
|
||||
const keyResultTypeId =
|
||||
namespaceWorkItemTypesQueryResponse.data.workspace.workItemTypes.nodes.find(
|
||||
(type) => type.name === WORK_ITEM_TYPE_VALUE_KEY_RESULT,
|
||||
).id;
|
||||
const issueTypeId = namespaceWorkItemTypesQueryResponse.data.workspace.workItemTypes.nodes.find(
|
||||
(type) => type.name === WORK_ITEM_TYPE_VALUE_ISSUE,
|
||||
).id;
|
||||
const taskTypeId = namespaceWorkItemTypesQueryResponse.data.workspace.workItemTypes.nodes.find(
|
||||
(type) => type.name === WORK_ITEM_TYPE_VALUE_TASK,
|
||||
).id;
|
||||
namespaceWorkItemTypesQueryResponse.data.workspace.workItemTypes.nodes
|
||||
.find((item) => item.name === WORK_ITEM_TYPE_VALUE_TASK)
|
||||
.widgetDefinitions.splice(
|
||||
namespaceWorkItemTypesQueryResponse.data.workspace.workItemTypes.nodes
|
||||
.find((item) => item.name === WORK_ITEM_TYPE_VALUE_TASK)
|
||||
.widgetDefinitions.findIndex((item) => item.type === 'CRM_CONTACTS'),
|
||||
1,
|
||||
);
|
||||
|
||||
const convertWorkItemMutationSuccessHandler = jest
|
||||
.fn()
|
||||
|
|
@ -58,14 +62,11 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
const oneDesignQueryHandler = jest.fn().mockResolvedValue(designCollectionResponse([mockDesign]));
|
||||
|
||||
const createComponent = ({
|
||||
hasOkrsFeature = true,
|
||||
okrsMvc = true,
|
||||
hasParent = false,
|
||||
hasChildren = false,
|
||||
widgets = [],
|
||||
workItemType = WORK_ITEM_TYPE_VALUE_TASK,
|
||||
convertWorkItemMutationHandler = convertWorkItemMutationSuccessHandler,
|
||||
|
||||
designQueryHandler = noDesignQueryHandler,
|
||||
} = {}) => {
|
||||
wrapper = mountExtended(WorkItemChangeTypeModal, {
|
||||
|
|
@ -83,12 +84,7 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
widgets,
|
||||
workItemType,
|
||||
allowedChildTypes: [{ name: WORK_ITEM_TYPE_VALUE_TASK }],
|
||||
},
|
||||
provide: {
|
||||
hasOkrsFeature,
|
||||
glFeatures: {
|
||||
okrsMvc,
|
||||
},
|
||||
allowedWorkItemTypesEE: [],
|
||||
},
|
||||
stubs: {
|
||||
GlModal: stubComponent(GlModal, {
|
||||
|
|
@ -125,25 +121,20 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
});
|
||||
|
||||
it('renders all types as select options', () => {
|
||||
expect(findGlFormSelect().findAll('option')).toHaveLength(4);
|
||||
});
|
||||
|
||||
it('does not render objective and key result if `okrsMvc` is disabled', () => {
|
||||
createComponent({ okrsMvc: false });
|
||||
|
||||
expect(findGlFormSelect().findAll('option')).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('does not allow to change type and disables `Change type` button when the work item has a parent', async () => {
|
||||
createComponent({ hasParent: true, widgets: workItemQueryResponse.data.workItem.widgets });
|
||||
|
||||
findGlFormSelect().vm.$emit('change', WORK_ITEM_TYPE_ENUM_KEY_RESULT);
|
||||
|
||||
await nextTick();
|
||||
await waitForPromises();
|
||||
|
||||
findGlFormSelect().vm.$emit('change', issueTypeId);
|
||||
|
||||
await nextTick();
|
||||
|
||||
expect(findWarningAlert().text()).toBe(
|
||||
'Parent item type issue is not supported on key result. Remove the parent item to change type.',
|
||||
'Parent item type issue is not supported on issue. Remove the parent item to change type.',
|
||||
);
|
||||
|
||||
expect(findChangeTypeModal().props('actionPrimary').attributes.disabled).toBe(true);
|
||||
|
|
@ -152,13 +143,14 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
it('does not allow to change type and disables `Change type` button when the work item has child items', async () => {
|
||||
createComponent({ workItemType: WORK_ITEM_TYPE_VALUE_ISSUE, hasChildren: true });
|
||||
|
||||
findGlFormSelect().vm.$emit('change', WORK_ITEM_TYPE_ENUM_KEY_RESULT);
|
||||
|
||||
await nextTick();
|
||||
await waitForPromises();
|
||||
|
||||
findGlFormSelect().vm.$emit('change', taskTypeId);
|
||||
|
||||
await nextTick();
|
||||
|
||||
expect(findWarningAlert().text()).toBe(
|
||||
'Key result does not support the task child item types. Remove child items to change type.',
|
||||
'Task does not support the task child item types. Remove child items to change type.',
|
||||
);
|
||||
expect(findChangeTypeModal().props('actionPrimary').attributes.disabled).toBe(true);
|
||||
});
|
||||
|
|
@ -172,7 +164,7 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
|
||||
await waitForPromises();
|
||||
|
||||
findGlFormSelect().vm.$emit('change', WORK_ITEM_TYPE_ENUM_KEY_RESULT);
|
||||
findGlFormSelect().vm.$emit('change', taskTypeId);
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
@ -180,32 +172,21 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
expect(findChangeTypeModal().props('actionPrimary').attributes.disabled).toBe(false);
|
||||
});
|
||||
|
||||
// These are all possible use cases of conflicts among project level work items
|
||||
// Other widgets are shared between all the work item types
|
||||
it.each`
|
||||
widgetType | widgetData | workItemType | typeTobeConverted | expectedString
|
||||
${WORK_ITEM_WIDGETS_NAME_MAP.MILESTONE} | ${workItemChangeTypeWidgets.MILESTONE} | ${WORK_ITEM_TYPE_VALUE_TASK} | ${WORK_ITEM_TYPE_ENUM_KEY_RESULT} | ${'Milestone'}
|
||||
${WORK_ITEM_WIDGETS_NAME_MAP.DEVELOPMENT} | ${workItemChangeTypeWidgets.DEVELOPMENT} | ${WORK_ITEM_TYPE_VALUE_ISSUE} | ${WORK_ITEM_TYPE_ENUM_KEY_RESULT} | ${'Development'}
|
||||
${WORK_ITEM_WIDGETS_NAME_MAP.CRM_CONTACTS} | ${workItemChangeTypeWidgets.CRM_CONTACTS} | ${WORK_ITEM_TYPE_VALUE_ISSUE} | ${WORK_ITEM_TYPE_ENUM_KEY_RESULT} | ${'Contacts'}
|
||||
${WORK_ITEM_WIDGETS_NAME_MAP.TIME_TRACKING} | ${workItemChangeTypeWidgets.TIME_TRACKING} | ${WORK_ITEM_TYPE_VALUE_ISSUE} | ${WORK_ITEM_TYPE_ENUM_KEY_RESULT} | ${'Time tracking'}
|
||||
`(
|
||||
'shows warning message in case of $widgetType widget',
|
||||
async ({ workItemType, widgetData, typeTobeConverted, expectedString }) => {
|
||||
createComponent({
|
||||
workItemType,
|
||||
widgets: [widgetData],
|
||||
});
|
||||
it('shows warning message in case of Contacts widget', async () => {
|
||||
createComponent({
|
||||
workItemType: WORK_ITEM_TYPE_VALUE_ISSUE,
|
||||
widgets: [workItemChangeTypeWidgets.CRM_CONTACTS],
|
||||
});
|
||||
|
||||
await waitForPromises();
|
||||
await waitForPromises();
|
||||
|
||||
findGlFormSelect().vm.$emit('change', typeTobeConverted);
|
||||
findGlFormSelect().vm.$emit('change', taskTypeId);
|
||||
|
||||
await nextTick();
|
||||
await nextTick();
|
||||
|
||||
expect(findWarningAlert().text()).toContain(expectedString);
|
||||
expect(findChangeTypeModal().props('actionPrimary').attributes.disabled).toBe(false);
|
||||
},
|
||||
);
|
||||
expect(findWarningAlert().text()).toContain('Contacts');
|
||||
expect(findChangeTypeModal().props('actionPrimary').attributes.disabled).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('convert work item mutation', () => {
|
||||
|
|
@ -214,7 +195,7 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
|
||||
await waitForPromises();
|
||||
|
||||
findGlFormSelect().vm.$emit('change', WORK_ITEM_TYPE_ENUM_KEY_RESULT);
|
||||
findGlFormSelect().vm.$emit('change', issueTypeId);
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
@ -225,7 +206,7 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
expect(convertWorkItemMutationSuccessHandler).toHaveBeenCalledWith({
|
||||
input: {
|
||||
id: 'gid://gitlab/WorkItem/1',
|
||||
workItemTypeId: keyResultTypeId,
|
||||
workItemTypeId: issueTypeId,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
|
@ -243,7 +224,7 @@ describe('WorkItemChangeTypeModal component', () => {
|
|||
|
||||
await waitForPromises();
|
||||
|
||||
findGlFormSelect().vm.$emit('change', WORK_ITEM_TYPE_ENUM_KEY_RESULT);
|
||||
findGlFormSelect().vm.$emit('change', issueTypeId);
|
||||
|
||||
await nextTick();
|
||||
|
||||
|
|
|
|||
|
|
@ -2065,12 +2065,6 @@ export const workItemChangeTypeWidgets = {
|
|||
totalTimeSpent: 10800,
|
||||
__typename: 'WorkItemWidgetTimeTracking',
|
||||
},
|
||||
PROGRESS: {
|
||||
type: 'PROGRESS',
|
||||
progress: 33,
|
||||
updatedAt: '2024-12-05T16:24:56Z',
|
||||
__typename: 'WorkItemWidgetProgress',
|
||||
},
|
||||
};
|
||||
|
||||
export const confidentialWorkItemTask = {
|
||||
|
|
|
|||
|
|
@@ -14,7 +14,7 @@ RSpec.describe Types::Ci::PipelineType, feature_category: :continuous_integratio

      coverage created_at updated_at started_at finished_at committed_at
      stages user retryable cancelable jobs source_job job job_artifacts downstream
      upstream path project active user_permissions warnings commit commit_path uses_needs
      test_report_summary test_suite ref ref_path warning_messages error_messages merge_request_event_type
      test_report_summary test_suite type ref ref_path warning_messages error_messages merge_request_event_type
      name total_jobs triggered_by_path child source stuck
      latest merge_request ref_text failure_reason yaml_errors yaml_error_messages trigger manual_variables
    ]
|
||||
|
|
|
|||
|
|
@ -18,4 +18,34 @@ RSpec.describe GitlabSchema.types['CiRunner'], feature_category: :runner do
|
|||
|
||||
expect(described_class).to include_graphql_fields(*expected_fields)
|
||||
end
|
||||
|
||||
describe 'URLs to admin area', :enable_admin_mode do
|
||||
let_it_be(:runner) { create(:ci_runner, :instance) }
|
||||
|
||||
let(:query) do
|
||||
%(
|
||||
query{
|
||||
runners {
|
||||
nodes {
|
||||
adminUrl
|
||||
editAdminUrl
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
subject(:response) { GitlabSchema.execute(query, context: { current_user: current_user }) }
|
||||
|
||||
context 'when current user is an admin' do
|
||||
let_it_be(:current_user) { create(:admin) }
|
||||
|
||||
it 'is not nil' do
|
||||
runner = response.dig('data', 'runners', 'nodes', 0)
|
||||
|
||||
expect(runner['adminUrl']).not_to be_nil
|
||||
expect(runner['editAdminUrl']).not_to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -3,7 +3,9 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Ci::RunnersHelper, feature_category: :fleet_visibility do
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:admin_user) { create(:user, :admin) }
|
||||
let_it_be(:non_admin_user) { create(:user) }
|
||||
let_it_be(:user) { non_admin_user }
|
||||
|
||||
before do
|
||||
allow(helper).to receive(:current_user).and_return(user)
|
||||
|
|
@ -46,10 +48,34 @@ RSpec.describe Ci::RunnersHelper, feature_category: :fleet_visibility do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#admin_runners_data_attributes' do
|
||||
subject { helper.admin_runners_data_attributes }
|
||||
describe '#admin_runners_app_data', :enable_admin_mode do
|
||||
let_it_be(:user) { admin_user }
|
||||
|
||||
it_behaves_like 'admin_runners_data_attributes contains data'
|
||||
subject(:data) { helper.admin_runners_app_data }
|
||||
|
||||
it 'returns correct data' do
|
||||
expect(data).to include(
|
||||
runner_install_help_page: 'https://docs.gitlab.com/runner/install/',
|
||||
new_runner_path: '/admin/runners/new',
|
||||
allow_registration_token: 'true',
|
||||
registration_token: Gitlab::CurrentSettings.runners_registration_token,
|
||||
online_contact_timeout_secs: 7200,
|
||||
stale_timeout_secs: 604800,
|
||||
tag_suggestions_path: '/admin/runners/tag_list.json',
|
||||
can_admin_runners: 'true'
|
||||
)
|
||||
end
|
||||
|
||||
context 'when current user is not an admin' do
|
||||
let_it_be(:user) { non_admin_user }
|
||||
|
||||
it 'returns the correct data' do
|
||||
expect(data).to include(
|
||||
registration_token: nil,
|
||||
can_admin_runners: 'false'
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#group_shared_runners_settings_data' do
|
||||
|
|
|
|||
|
|
@@ -4,6 +4,7 @@ require 'spec_helper'

RSpec.describe Authn::Tokens::ClusterAgentToken, feature_category: :system_access do
  let_it_be(:user) { create(:user) }
  let_it_be(:admin) { create(:admin) }

  let(:cluster_agent_token) { create(:cluster_agent_token, token_encrypted: nil) }

@@ -15,11 +16,13 @@ RSpec.describe Authn::Tokens::ClusterAgentToken, feature_category: :system_acces

  it_behaves_like 'finding the valid revocable'

  describe '#revoke!' do
    it 'does not support revocation yet' do
      expect do
        token.revoke!(user)
      end.to raise_error(::Authn::AgnosticTokenIdentifier::UnsupportedTokenError, 'Unsupported token type')
  describe '#revoke!', :enable_admin_mode do
    it 'revokes the token' do
      expect(token.revocable.revoked?).to be_falsey

      expect(token.revoke!(admin)).to be_success

      expect(token.revocable.revoked?).to be_truthy
    end
  end
end
|
||||
|
|
|
|||
|
|
@ -1,99 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdOfVulnerabilityReads, schema: 20231220225325 do
|
||||
before(:all) do
|
||||
# This migration will not work if a sec database is configured. It should be finalized and removed prior to
|
||||
# sec db rollout.
|
||||
# Consult https://gitlab.com/gitlab-org/gitlab/-/merge_requests/171707 for more info.
|
||||
skip_if_multiple_databases_are_setup(:sec)
|
||||
end
|
||||
|
||||
let(:organizations) { table(:organizations) }
|
||||
let(:namespaces) { table(:namespaces) }
|
||||
let(:projects) { table(:projects) }
|
||||
let(:users) { table(:users) }
|
||||
let(:scanners) { table(:vulnerability_scanners) }
|
||||
let(:vulnerabilities) { table(:vulnerabilities) }
|
||||
let(:vulnerability_reads) { table(:vulnerability_reads) }
|
||||
|
||||
let(:vulnerability_findings) { table(:vulnerability_occurrences) }
|
||||
let(:scanners_table) { table(:vulnerability_scanners) }
|
||||
let(:identifiers_table) { table(:vulnerability_identifiers) }
|
||||
|
||||
let(:organization) { organizations.create!(name: 'organization', path: 'organization') }
|
||||
let(:namespace) { namespaces.create!(name: 'user', path: 'user', organization_id: organization.id) }
|
||||
let(:project) do
|
||||
projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id, organization_id: organization.id)
|
||||
end
|
||||
|
||||
let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
|
||||
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
|
||||
let(:vulnerability) do
|
||||
identifier = identifiers_table.create!(
|
||||
project_id: project.id,
|
||||
fingerprint: SecureRandom.hex(20),
|
||||
external_id: "cwe-2021-1234",
|
||||
external_type: "cwe",
|
||||
name: "CWE-73"
|
||||
)
|
||||
|
||||
scanner = scanners_table.find_or_create_by!(name: 'bar') do |scanner|
|
||||
scanner.project_id = project.id
|
||||
scanner.external_id = 'foo'
|
||||
end
|
||||
|
||||
finding = vulnerability_findings.create!(
|
||||
project_id: project.id,
|
||||
scanner_id: scanner.id,
|
||||
severity: 1,
|
||||
report_type: 0, # sast
|
||||
primary_identifier_id: identifier.id,
|
||||
project_fingerprint: SecureRandom.hex(20),
|
||||
location_fingerprint: SecureRandom.hex(20),
|
||||
uuid: SecureRandom.uuid,
|
||||
name: identifier.name,
|
||||
raw_metadata: "{}",
|
||||
metadata_version: "test:1.0"
|
||||
)
|
||||
|
||||
vulnerabilities.create!(
|
||||
project_id: project.id,
|
||||
author_id: user.id,
|
||||
finding_id: finding.id,
|
||||
title: 'test',
|
||||
severity: finding.severity,
|
||||
confidence: 1,
|
||||
report_type: 1
|
||||
)
|
||||
end
|
||||
|
||||
let(:vulnerability_read) do
|
||||
vulnerability_reads.create!(
|
||||
project_id: project.id,
|
||||
vulnerability_id: vulnerability.id,
|
||||
scanner_id: scanner.id,
|
||||
severity: 1,
|
||||
report_type: 1,
|
||||
state: 1,
|
||||
uuid: SecureRandom.uuid
|
||||
)
|
||||
end
|
||||
|
||||
subject(:perform_migration) do
|
||||
described_class.new(
|
||||
start_id: vulnerability_read.vulnerability_id,
|
||||
end_id: vulnerability_read.vulnerability_id,
|
||||
batch_table: :vulnerability_reads,
|
||||
batch_column: :vulnerability_id,
|
||||
sub_batch_size: 1,
|
||||
pause_ms: 0,
|
||||
connection: ActiveRecord::Base.connection
|
||||
).perform
|
||||
end
|
||||
|
||||
it 'sets the namespace_id of existing record' do
|
||||
expect { perform_migration }.to change { vulnerability_read.reload.namespace_id }.from(nil).to(namespace.id)
|
||||
end
|
||||
end
|
||||
|
|
@@ -418,6 +418,14 @@ RSpec.describe Gitlab::Database, feature_category: :database do

    ensure
      new_connection&.disconnect!
    end

    it 'returns nil when database model does not exist' do
      connection = Project.connection
      db_config = double(name: 'unknown')

      expect(described_class).to receive(:db_config_for_connection).with(connection).and_return(db_config)
      expect(described_class.gitlab_schemas_for_connection(connection)).to be_nil
    end
  end

  describe '.database_base_models_with_gitlab_shared' do
|
||||
|
|
|
|||
|
|
@@ -39,6 +39,15 @@ RSpec.describe Gitlab::Regex, feature_category: :tooling do

    it_behaves_like 'project name regex'
  end

  describe '.oci_repository_path_regex' do
    subject { described_class.oci_repository_path_regex }

    it { is_expected.to match("my_project") }
    it { is_expected.not_to match('_myproject') }
    it { is_expected.not_to match('myproject_') }
    it { is_expected.not_to match('_myproject_') }
  end
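These examples fix the boundary behaviour: the path may contain letters, digits, `_`, `-`, and `.`, but may not start or end with a special character. A regex of the following shape satisfies the four expectations; it is an illustrative sketch rather than the definition used by `Gitlab::Regex`, and it omits the reserved `.git`/`.atom` suffix rule from the error message above.

```ruby
# Illustrative sketch of a path regex matching the expectations above.
OCI_PATH_SKETCH = %r{\A[a-zA-Z0-9](?:[a-zA-Z0-9._-]*[a-zA-Z0-9])?\z}

OCI_PATH_SKETCH.match?('my_project')  # => true
OCI_PATH_SKETCH.match?('_myproject')  # => false (leading special character)
OCI_PATH_SKETCH.match?('myproject_')  # => false (trailing special character)
OCI_PATH_SKETCH.match?('_myproject_') # => false
```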
  describe '.group_name_regex' do
    subject { described_class.group_name_regex }
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,85 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require_migration!
|
||||
|
||||
RSpec.describe FixGroupScimIdentities, feature_category: :system_access do
|
||||
let(:migration) { described_class.new }
|
||||
let(:groups) { table(:namespaces) }
|
||||
let(:users) { table(:users) }
|
||||
let(:scim_identities) { table(:scim_identities) }
|
||||
let(:group_scim_identities) { table(:group_scim_identities) }
|
||||
let(:organizations) { table(:organizations) }
|
||||
|
||||
# Set up parent group and users
|
||||
let(:organization) { organizations.create!(path: 'org') }
|
||||
let(:group) { groups.create!(name: 'test-group', path: 'test-group', organization_id: organization.id) }
|
||||
let(:user1) { users.create!(email: 'user1@example.com', username: 'user1', projects_limit: 10) }
|
||||
let(:user2) { users.create!(email: 'user2@example.com', username: 'user2', projects_limit: 10) }
|
||||
let(:user3) { users.create!(email: 'user3@example.com', username: 'user3', projects_limit: 10) }
|
||||
|
||||
# Test data setup using table helper
|
||||
let(:active_scim) { scim_identities.create!(active: true, user_id: user1.id, extern_uid: '1') }
|
||||
let(:inactive_scim) { scim_identities.create!(active: false, user_id: user2.id, extern_uid: '2') }
|
||||
let(:matching_scim) { scim_identities.create!(active: true, user_id: user3.id, extern_uid: '3') }
|
||||
|
||||
let(:mismatched_active_group_scim) do
|
||||
group_scim_identities.create!(
|
||||
temp_source_id: active_scim.id,
|
||||
active: false,
|
||||
group_id: group.id,
|
||||
user_id: user1.id,
|
||||
extern_uid: '4'
|
||||
)
|
||||
end
|
||||
|
||||
let(:mismatched_inactive_group_scim) do
|
||||
group_scim_identities.create!(
|
||||
temp_source_id: inactive_scim.id,
|
||||
active: true,
|
||||
group_id: group.id,
|
||||
user_id: user2.id,
|
||||
extern_uid: '5'
|
||||
)
|
||||
end
|
||||
|
||||
let(:matching_group_scim) do
|
||||
group_scim_identities.create!(
|
||||
temp_source_id: matching_scim.id,
|
||||
active: true,
|
||||
group_id: group.id,
|
||||
user_id: user3.id,
|
||||
extern_uid: '6'
|
||||
)
|
||||
end
|
||||
|
||||
describe '#up' do
|
||||
it 'updates mismatched group_scim_identities active status' do
|
||||
# Setup test data
|
||||
mismatched_active_group_scim
|
||||
mismatched_inactive_group_scim
|
||||
matching_group_scim
|
||||
|
||||
migrate!
|
||||
|
||||
expect(group_scim_identities.find(mismatched_active_group_scim.id).active).to be true
|
||||
expect(group_scim_identities.find(mismatched_inactive_group_scim.id).active).to be false
|
||||
expect(group_scim_identities.find(matching_group_scim.id).active).to be true
|
||||
end
|
||||
|
||||
it 'handles orphaned records gracefully' do
|
||||
orphaned_user = users.create!(email: 'orphaned@example.com', username: 'orphaned', projects_limit: 10)
|
||||
orphaned_record = group_scim_identities.create!(
|
||||
temp_source_id: nil,
|
||||
active: true,
|
||||
group_id: group.id,
|
||||
user_id: orphaned_user.id,
|
||||
extern_uid: '1'
|
||||
)
|
||||
|
||||
migrate!
|
||||
|
||||
expect(group_scim_identities.find(orphaned_record.id).active).to be true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -2538,6 +2538,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
|
|||
subject { build.variables }
|
||||
|
||||
context 'returns variables' do
|
||||
let(:pages_hostname) { "#{project.namespace.path}.example.com" }
|
||||
let(:pages_url) { "http://#{pages_hostname}/#{project.path}" }
|
||||
let(:predefined_variables) do
|
||||
[
|
||||
{ key: 'CI_PIPELINE_ID', value: pipeline.id.to_s, public: true, masked: false },
|
||||
|
|
@ -2614,7 +2616,9 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
|
|||
{ key: 'CI_COMMIT_DESCRIPTION', value: pipeline.git_commit_description, public: true, masked: false },
|
||||
{ key: 'CI_COMMIT_REF_PROTECTED', value: (!!pipeline.protected_ref?).to_s, public: true, masked: false },
|
||||
{ key: 'CI_COMMIT_TIMESTAMP', value: pipeline.git_commit_timestamp, public: true, masked: false },
|
||||
{ key: 'CI_COMMIT_AUTHOR', value: pipeline.git_author_full_text, public: true, masked: false }
|
||||
{ key: 'CI_COMMIT_AUTHOR', value: pipeline.git_author_full_text, public: true, masked: false },
|
||||
{ key: 'CI_PAGES_HOSTNAME', value: pages_hostname, public: true, masked: false },
|
||||
{ key: 'CI_PAGES_URL', value: pages_url, public: true, masked: false }
|
||||
]
|
||||
end
|
||||
|
||||
|
|
@ -2684,14 +2688,15 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
|
|||
pipeline_pre_var,
|
||||
build_yaml_var,
|
||||
job_dependency_var,
|
||||
{ key: 'secret', value: 'value', public: false, masked: false }])
|
||||
{ key: 'secret', value: 'value', public: false, masked: false },
|
||||
{ key: "CI_PAGES_HOSTNAME", value: pages_hostname, masked: false, public: true },
|
||||
{ key: "CI_PAGES_URL", value: pages_url, masked: false, public: true }])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when build has environment and user-provided variables' do
|
||||
let(:expected_variables) do
|
||||
predefined_variables.map { |variable| variable.fetch(:key) } +
|
||||
%w[YAML_VARIABLE CI_ENVIRONMENT_SLUG CI_ENVIRONMENT_URL]
|
||||
predefined_variables.map { |variable| variable.fetch(:key) }
|
||||
end
|
||||
|
||||
before do
|
||||
|
|
@ -2708,12 +2713,20 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
|
|||
{ key: 'CI_ENVIRONMENT_URL', value: 'https://gitlab.com', public: true, masked: false }
|
||||
],
|
||||
after: 'CI_NODE_TOTAL')
|
||||
|
||||
insert_expected_predefined_variables(
|
||||
[
|
||||
{ key: 'YAML_VARIABLE', value: 'staging', public: true, masked: false },
|
||||
{ key: 'CI_ENVIRONMENT_SLUG', value: 'start', public: true, masked: false },
|
||||
{ key: 'CI_ENVIRONMENT_URL', value: 'https://gitlab.com', public: true, masked: false }
|
||||
],
|
||||
after: 'CI_COMMIT_AUTHOR')
|
||||
end
|
||||
|
||||
it 'matches explicit variables ordering' do
|
||||
received_variables = subject.map { |variable| variable[:key] }
|
||||
|
||||
expect(received_variables).to eq expected_variables
|
||||
expect(received_variables).to eq(expected_variables)
|
||||
end
|
||||
|
||||
describe 'CI_ENVIRONMENT_ACTION' do
|
||||
|
|
@ -2882,7 +2895,9 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
|
|||
it_behaves_like 'containing environment variables'
|
||||
|
||||
it 'puts $CI_ENVIRONMENT_URL in the last so all other variables are available to be used when runners are trying to expand it' do
|
||||
expect(subject.to_runner_variables.last).to eq(expected_environment_variables.last)
|
||||
ci_env_url = subject.to_runner_variables.find { |var| var[:key] == 'CI_ENVIRONMENT_URL' }
|
||||
|
||||
expect(ci_env_url).to eq(expected_environment_variables.last)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -931,6 +931,77 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
|
|||
end
|
||||
end
|
||||
|
||||
describe '#tag_pipeline?' do
|
||||
subject { pipeline.tag_pipeline? }
|
||||
|
||||
context 'when pipeline is for a tag' do
|
||||
let(:pipeline) { create(:ci_pipeline, tag: true) }
|
||||
|
||||
it { is_expected.to be_truthy }
|
||||
end
|
||||
|
||||
context 'when pipeline is not for a tag' do
|
||||
let(:pipeline) { create(:ci_pipeline, tag: false) }
|
||||
|
||||
it { is_expected.to be_falsy }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#type' do
|
||||
subject { pipeline.type }
|
||||
|
||||
context 'when pipeline is for a branch' do
|
||||
let(:pipeline) { create(:ci_pipeline, tag: false) }
|
||||
|
||||
it { is_expected.to eq('branch') }
|
||||
end
|
||||
|
||||
context 'when pipeline is for a tag' do
|
||||
let(:pipeline) { create(:ci_pipeline, tag: true) }
|
||||
|
||||
it { is_expected.to eq('tag') }
|
||||
end
|
||||
|
||||
context 'when pipeline is merge request pipeline' do
|
||||
let!(:pipeline) do
|
||||
create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
|
||||
end
|
||||
|
||||
let(:target_sha) { nil }
|
||||
let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
|
||||
|
||||
it { is_expected.to eq('merge_request') }
|
||||
|
||||
context 'when pipeline is detached merge request pipeline' do
|
||||
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
|
||||
|
||||
it { is_expected.to eq('merge_request') }
|
||||
end
|
||||
end
|
||||
|
||||
context 'when pipeline is merged results pipeline' do
|
||||
let!(:pipeline) do
|
||||
create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, target_sha: target_sha)
|
||||
end
|
||||
|
||||
let(:merge_request) { create(:merge_request) }
|
||||
let(:target_sha) { merge_request.target_branch_sha }
|
||||
|
||||
it { is_expected.to eq('merged_result') }
|
||||
end
|
||||
|
||||
context 'when pipeline is merge train pipeline', if: Gitlab.ee? do
|
||||
let!(:pipeline) do
|
||||
create(:ci_pipeline, source: :merge_request_event, merge_request: merge_request, ref: ref, target_sha: 'xxx')
|
||||
end
|
||||
|
||||
let(:merge_request) { create(:merge_request) }
|
||||
let(:ref) { 'refs/merge-requests/1/train' }
|
||||
|
||||
it { is_expected.to eq('merge_train') }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#merge_request_ref?' do
|
||||
subject { pipeline.merge_request_ref? }
|
||||
|
||||
|
|
|
|||
|
|
@@ -248,6 +248,16 @@ RSpec.describe Packages::Package, type: :model, feature_category: :package_regis

    end
  end

  describe '.with_version_like' do
    let(:version_pattern) { '%.0.1%' }

    subject { described_class.with_version_like(version_pattern) }

    it 'includes packages with the version pattern' do
      is_expected.to match_array([package2, package3])
    end
  end

  describe '.without_version_like' do
    let(:version_pattern) { '%.0.0%' }
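The new examples exercise `LIKE`-based version scopes. The sketch below shows the typical shape of such scopes on an ActiveRecord model; it assumes a Rails environment with a backing table and is an assumption about the shape of the implementation, not the actual `Packages::Package` code.

```ruby
# Sketch only: assumes a Rails app with a packages table; not the real model.
class Package < ApplicationRecord
  scope :with_version_like,    ->(pattern) { where(arel_table[:version].matches(pattern)) }
  scope :without_version_like, ->(pattern) { where(arel_table[:version].does_not_match(pattern)) }
end

Package.with_version_like('%.0.1%')    # packages whose version contains ".0.1"
Package.without_version_like('%.0.0%') # packages whose version does not contain ".0.0"
```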
@@ -305,7 +305,7 @@ RSpec.describe VirtualRegistries::Packages::Maven::Cache::Entry, type: :model, f

    let_it_be(:parent) { create(:virtual_registries_packages_maven_upstream) }
    let_it_be(:model) { create(:virtual_registries_packages_maven_cache_entry, upstream: parent) }

    let(:find_model) { model.reload }
    let(:find_model) { described_class.last }
  end
end