Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2025-04-01 09:07:34 +00:00
parent 5bb3cef450
commit 0517c858ce
43 changed files with 747 additions and 409 deletions

View File

@@ -427,17 +427,17 @@
{"name":"nkf","version":"0.2.0","platform":"java","checksum":"3e6f022d1743a863bf05e936c7c2110be07ba1c593ea974df75d89e8bf7cc967"},
{"name":"nkf","version":"0.2.0","platform":"ruby","checksum":"fbc151bda025451f627fafdfcb3f4f13d0b22ae11f58c6d3a2939c76c5f5f126"},
{"name":"no_proxy_fix","version":"0.1.2","platform":"ruby","checksum":"4e9b4c31bb146de7fcf347dc1087bb13ac2039b56d50aa019e61036256abcd00"},
{"name":"nokogiri","version":"1.18.6","platform":"aarch64-linux-gnu","checksum":"1b11f9a814068282cc2b47ebe61395b2a69d1918092d2ca3bd664074f72540e9"},
{"name":"nokogiri","version":"1.18.6","platform":"aarch64-linux-musl","checksum":"797662f201c37a8feac3bd5b0c0e3447053bc71e6633d273fefd4c68b03e6a54"},
{"name":"nokogiri","version":"1.18.6","platform":"arm-linux-gnu","checksum":"2da07a07ef4c9d9e9da809b3dc0937ed90b031e32c2c658d9918941b85d68b95"},
{"name":"nokogiri","version":"1.18.6","platform":"arm-linux-musl","checksum":"e8ae1c9a4d8cfa7a92d632a6f596a88235ebe66d4b70418543378ba16c601f70"},
{"name":"nokogiri","version":"1.18.6","platform":"arm64-darwin","checksum":"727a441d179d934b4b7c73e0e28e6723ee46463d96bb0cc6e2e33a13540962c4"},
{"name":"nokogiri","version":"1.18.6","platform":"java","checksum":"bf16c53446987007ff3e1deb29d65d20444073ba112cb5bddbd2671135ba293c"},
{"name":"nokogiri","version":"1.18.6","platform":"ruby","checksum":"4d283431d7829719ea1287ca388f24c6ce343af736bbcbd1365cbdb83bce41a4"},
{"name":"nokogiri","version":"1.18.6","platform":"x64-mingw-ucrt","checksum":"134f6d54f56edd46cb6db77c9d9de1704b3f83b3981a6763671e3cfbeba221f5"},
{"name":"nokogiri","version":"1.18.6","platform":"x86_64-darwin","checksum":"fb72568c97ccd90a8d68cb765b0ff0720b109bd62e3babbf372e854ef8fef995"},
{"name":"nokogiri","version":"1.18.6","platform":"x86_64-linux-gnu","checksum":"df065db6ba6e1e80f76ef04f860fcf260cc24685125fe33cdc3d1572a1c66b71"},
{"name":"nokogiri","version":"1.18.6","platform":"x86_64-linux-musl","checksum":"75ec7a93cec54687aa63b2eaf830dc4ac5b4f3d8c969f20c035e67c9e6a30cef"},
{"name":"nokogiri","version":"1.18.7","platform":"aarch64-linux-gnu","checksum":"57a064ab5440814a69a0e040817bd8154adea68a30d2ff2b3aa515a6a06dbb5f"},
{"name":"nokogiri","version":"1.18.7","platform":"aarch64-linux-musl","checksum":"3e442dc5b69376e84288295fe37cbb890a21ad816a7e571e5e9967b3c1e30cd3"},
{"name":"nokogiri","version":"1.18.7","platform":"arm-linux-gnu","checksum":"337d9149deb5ae01022dff7c90f97bed81715fd586aacab0c5809ef933994c5e"},
{"name":"nokogiri","version":"1.18.7","platform":"arm-linux-musl","checksum":"97a26edcc975f780a0822aaf7f7d7427c561067c1c9ee56bd3542960f0c28a6e"},
{"name":"nokogiri","version":"1.18.7","platform":"arm64-darwin","checksum":"083abb2e9ed2646860f6b481a981485a658c6064caafaa81bf1cda1bada2e9d5"},
{"name":"nokogiri","version":"1.18.7","platform":"java","checksum":"2cb83666f35619ec59d24d831bf492e49cfe27b112c222330ee929737f42f2eb"},
{"name":"nokogiri","version":"1.18.7","platform":"ruby","checksum":"6b63ff5defe48f30d1d3b3122f65255ca91df2caf5378c6e0482ce73ff46fb31"},
{"name":"nokogiri","version":"1.18.7","platform":"x64-mingw-ucrt","checksum":"681148fbc918aa5d54933d8b48aeb9462ab708d23409797ed750af961107f72b"},
{"name":"nokogiri","version":"1.18.7","platform":"x86_64-darwin","checksum":"081d1aa517454ba3415304e2ea51fe411d6a3a809490d0c4aa42799cada417b7"},
{"name":"nokogiri","version":"1.18.7","platform":"x86_64-linux-gnu","checksum":"3a0bf946eb2defde13d760f869b61bc8b0c18875afdd3cffa96543cfa3a18005"},
{"name":"nokogiri","version":"1.18.7","platform":"x86_64-linux-musl","checksum":"9d83f8ec1fc37a305fa835d7ee61a4f37899e6ccc6dcb05be6645fa9797605af"},
{"name":"notiffany","version":"0.1.3","platform":"ruby","checksum":"d37669605b7f8dcb04e004e6373e2a780b98c776f8eb503ac9578557d7808738"},
{"name":"numerizer","version":"0.2.0","platform":"ruby","checksum":"e58076d5ee5370417b7e52d9cb25836d62acd1b8d9a194c308707986c1705d7b"},
{"name":"oauth","version":"0.5.6","platform":"ruby","checksum":"4085fe28e0c5e2434135e00a6555294fd2a4ff96a98d1bdecdcd619fc6368dff"},

View File

@@ -1217,7 +1217,7 @@ GEM
nio4r (2.7.0)
nkf (0.2.0)
no_proxy_fix (0.1.2)
nokogiri (1.18.6)
nokogiri (1.18.7)
mini_portile2 (~> 2.8.2)
racc (~> 1.4)
notiffany (0.1.3)

View File

@@ -430,17 +430,17 @@
{"name":"nkf","version":"0.2.0","platform":"java","checksum":"3e6f022d1743a863bf05e936c7c2110be07ba1c593ea974df75d89e8bf7cc967"},
{"name":"nkf","version":"0.2.0","platform":"ruby","checksum":"fbc151bda025451f627fafdfcb3f4f13d0b22ae11f58c6d3a2939c76c5f5f126"},
{"name":"no_proxy_fix","version":"0.1.2","platform":"ruby","checksum":"4e9b4c31bb146de7fcf347dc1087bb13ac2039b56d50aa019e61036256abcd00"},
{"name":"nokogiri","version":"1.18.6","platform":"aarch64-linux-gnu","checksum":"1b11f9a814068282cc2b47ebe61395b2a69d1918092d2ca3bd664074f72540e9"},
{"name":"nokogiri","version":"1.18.6","platform":"aarch64-linux-musl","checksum":"797662f201c37a8feac3bd5b0c0e3447053bc71e6633d273fefd4c68b03e6a54"},
{"name":"nokogiri","version":"1.18.6","platform":"arm-linux-gnu","checksum":"2da07a07ef4c9d9e9da809b3dc0937ed90b031e32c2c658d9918941b85d68b95"},
{"name":"nokogiri","version":"1.18.6","platform":"arm-linux-musl","checksum":"e8ae1c9a4d8cfa7a92d632a6f596a88235ebe66d4b70418543378ba16c601f70"},
{"name":"nokogiri","version":"1.18.6","platform":"arm64-darwin","checksum":"727a441d179d934b4b7c73e0e28e6723ee46463d96bb0cc6e2e33a13540962c4"},
{"name":"nokogiri","version":"1.18.6","platform":"java","checksum":"bf16c53446987007ff3e1deb29d65d20444073ba112cb5bddbd2671135ba293c"},
{"name":"nokogiri","version":"1.18.6","platform":"ruby","checksum":"4d283431d7829719ea1287ca388f24c6ce343af736bbcbd1365cbdb83bce41a4"},
{"name":"nokogiri","version":"1.18.6","platform":"x64-mingw-ucrt","checksum":"134f6d54f56edd46cb6db77c9d9de1704b3f83b3981a6763671e3cfbeba221f5"},
{"name":"nokogiri","version":"1.18.6","platform":"x86_64-darwin","checksum":"fb72568c97ccd90a8d68cb765b0ff0720b109bd62e3babbf372e854ef8fef995"},
{"name":"nokogiri","version":"1.18.6","platform":"x86_64-linux-gnu","checksum":"df065db6ba6e1e80f76ef04f860fcf260cc24685125fe33cdc3d1572a1c66b71"},
{"name":"nokogiri","version":"1.18.6","platform":"x86_64-linux-musl","checksum":"75ec7a93cec54687aa63b2eaf830dc4ac5b4f3d8c969f20c035e67c9e6a30cef"},
{"name":"nokogiri","version":"1.18.7","platform":"aarch64-linux-gnu","checksum":"57a064ab5440814a69a0e040817bd8154adea68a30d2ff2b3aa515a6a06dbb5f"},
{"name":"nokogiri","version":"1.18.7","platform":"aarch64-linux-musl","checksum":"3e442dc5b69376e84288295fe37cbb890a21ad816a7e571e5e9967b3c1e30cd3"},
{"name":"nokogiri","version":"1.18.7","platform":"arm-linux-gnu","checksum":"337d9149deb5ae01022dff7c90f97bed81715fd586aacab0c5809ef933994c5e"},
{"name":"nokogiri","version":"1.18.7","platform":"arm-linux-musl","checksum":"97a26edcc975f780a0822aaf7f7d7427c561067c1c9ee56bd3542960f0c28a6e"},
{"name":"nokogiri","version":"1.18.7","platform":"arm64-darwin","checksum":"083abb2e9ed2646860f6b481a981485a658c6064caafaa81bf1cda1bada2e9d5"},
{"name":"nokogiri","version":"1.18.7","platform":"java","checksum":"2cb83666f35619ec59d24d831bf492e49cfe27b112c222330ee929737f42f2eb"},
{"name":"nokogiri","version":"1.18.7","platform":"ruby","checksum":"6b63ff5defe48f30d1d3b3122f65255ca91df2caf5378c6e0482ce73ff46fb31"},
{"name":"nokogiri","version":"1.18.7","platform":"x64-mingw-ucrt","checksum":"681148fbc918aa5d54933d8b48aeb9462ab708d23409797ed750af961107f72b"},
{"name":"nokogiri","version":"1.18.7","platform":"x86_64-darwin","checksum":"081d1aa517454ba3415304e2ea51fe411d6a3a809490d0c4aa42799cada417b7"},
{"name":"nokogiri","version":"1.18.7","platform":"x86_64-linux-gnu","checksum":"3a0bf946eb2defde13d760f869b61bc8b0c18875afdd3cffa96543cfa3a18005"},
{"name":"nokogiri","version":"1.18.7","platform":"x86_64-linux-musl","checksum":"9d83f8ec1fc37a305fa835d7ee61a4f37899e6ccc6dcb05be6645fa9797605af"},
{"name":"notiffany","version":"0.1.3","platform":"ruby","checksum":"d37669605b7f8dcb04e004e6373e2a780b98c776f8eb503ac9578557d7808738"},
{"name":"numerizer","version":"0.2.0","platform":"ruby","checksum":"e58076d5ee5370417b7e52d9cb25836d62acd1b8d9a194c308707986c1705d7b"},
{"name":"oauth","version":"0.5.6","platform":"ruby","checksum":"4085fe28e0c5e2434135e00a6555294fd2a4ff96a98d1bdecdcd619fc6368dff"},

View File

@@ -1234,7 +1234,7 @@ GEM
nio4r (2.7.0)
nkf (0.2.0)
no_proxy_fix (0.1.2)
nokogiri (1.18.6)
nokogiri (1.18.7)
mini_portile2 (~> 2.8.2)
racc (~> 1.4)
notiffany (0.1.3)

View File

@@ -24,6 +24,7 @@ import TitleSuggestions from '~/issues/new/components/title_suggestions.vue';
import { addShortcutsExtension } from '~/behaviors/shortcuts';
import ZenMode from '~/zen_mode';
import ShortcutsWorkItems from '~/behaviors/shortcuts/shortcuts_work_items';
import WorkItemDates from 'ee_else_ce/work_items/components/work_item_dates.vue';
import {
getDisplayReference,
getNewWorkItemAutoSaveKey,
@@ -69,7 +70,6 @@ import WorkItemMilestone from './work_item_milestone.vue';
import WorkItemParent from './work_item_parent.vue';
import WorkItemLoading from './work_item_loading.vue';
import WorkItemCrmContacts from './work_item_crm_contacts.vue';
import WorkItemDates from './work_item_dates.vue';
export default {
components: {

View File

@@ -4,6 +4,8 @@ import glFeatureFlagMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import { ListType } from '~/boards/constants';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import WorkItemDates from 'ee_else_ce/work_items/components/work_item_dates.vue';
import {
WIDGET_TYPE_ASSIGNEES,
WIDGET_TYPE_HEALTH_STATUS,
@@ -32,7 +34,6 @@ import WorkItemMilestone from './work_item_milestone.vue';
import WorkItemParent from './work_item_parent.vue';
import WorkItemTimeTracking from './work_item_time_tracking.vue';
import WorkItemCrmContacts from './work_item_crm_contacts.vue';
import WorkItemDates from './work_item_dates.vue';
export default {
ListType,

View File

@@ -1,5 +1,5 @@
<script>
import { GlDatepicker, GlFormGroup, GlFormRadio } from '@gitlab/ui';
import { GlDatepicker, GlFormGroup } from '@gitlab/ui';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { findStartAndDueDateWidget, newWorkItemId } from '~/work_items/utils';
import { s__ } from '~/locale';
@@ -26,7 +26,6 @@ export default {
components: {
GlDatepicker,
GlFormGroup,
GlFormRadio,
WorkItemSidebarWidget,
},
mixins: [Tracking.mixin()],
@@ -98,7 +97,7 @@ export default {
tracking() {
return {
category: TRACKING_CATEGORY_SHOW,
label: 'item_rolledup_dates',
label: 'item_dates',
property: `type_${this.workItemType}`,
};
},
@@ -165,57 +164,6 @@ export default {
this.localDueDate = this.localStartDate;
}
},
updateRollupType() {
this.isUpdating = true;
this.track('updated_rollup_type');
if (this.workItemId === newWorkItemId(this.workItemType)) {
this.$apollo.mutate({
mutation: updateNewWorkItemMutation,
variables: {
input: {
workItemType: this.workItemType,
fullPath: this.fullPath,
rolledUpDates: {
isFixed: this.rollupType === ROLLUP_TYPE_FIXED,
rollUp: this.shouldRollUp,
},
},
},
});
this.isUpdating = false;
return;
}
this.$apollo
.mutate({
mutation: updateWorkItemMutation,
variables: {
input: {
id: this.workItemId,
startAndDueDateWidget: {
isFixed: this.rollupType === ROLLUP_TYPE_FIXED,
},
},
},
optimisticResponse: this.optimisticResponse,
})
.then(({ data }) => {
if (data.workItemUpdate.errors.length) {
throw new Error(data.workItemUpdate.errors.join('; '));
}
})
.catch((error) => {
const message = sprintfWorkItem(I18N_WORK_ITEM_ERROR_UPDATING, this.workItemType);
this.$emit('error', message);
Sentry.captureException(error);
})
.finally(() => {
this.isUpdating = false;
});
},
updateDates() {
if (this.datesUnchanged) {
return;
@@ -254,7 +202,6 @@ export default {
input: {
id: this.workItemId,
startAndDueDateWidget: {
isFixed: true,
dueDate: this.localDueDate ? toISODateFormat(this.localDueDate) : null,
startDate: this.localStartDate ? toISODateFormat(this.localStartDate) : null,
},
@@ -291,25 +238,6 @@ export default {
{{ s__('WorkItem|Dates') }}
</template>
<template #content>
<fieldset v-if="shouldRollUp" class="gl-mt-2 gl-flex gl-gap-5">
<legend class="gl-sr-only">{{ s__('WorkItem|Dates') }}</legend>
<gl-form-radio
v-model="rollupType"
value="fixed"
:disabled="!canUpdate || isUpdating"
@change="updateRollupType"
>
{{ s__('WorkItem|Fixed') }}
</gl-form-radio>
<gl-form-radio
v-model="rollupType"
value="inherited"
:disabled="!canUpdate || isUpdating"
@change="updateRollupType"
>
{{ s__('WorkItem|Inherited') }}
</gl-form-radio>
</fieldset>
<p class="gl-m-0 gl-py-1">
<span class="gl-inline-block gl-min-w-8">{{ s__('WorkItem|Start') }}:</span>
<span data-testid="start-date-value" :class="{ 'gl-text-subtle': !startDate }">

View File

@@ -63,7 +63,7 @@ module Ci
Ci::BuildSource
.where(project_id: project.id)
.loose_index_scan(column: :source)
.select(:source).where(source: sources)
.where(source: sources)
end
def array_mapping_scope

View File

@@ -28,13 +28,15 @@ module LooseIndexScan
# > User.where(id: distinct_authors)
def loose_index_scan(column:, order: :asc)
arel_table = self.arel_table
arel_column = arel_table[column.to_s]
# Handle different column types
arel_column, column_alias, column_for_select = extract_column_and_alias_and_select(column, arel_table)
cte = Gitlab::SQL::RecursiveCTE.new(:loose_index_scan_cte, union_args: { remove_order: false })
cte_query = except(:select)
.select(column)
.order(column => order)
.select(column_for_select)
.order(column_alias => order)
.limit(1)
inner_query = except(:select)
@@ -43,24 +45,38 @@ module LooseIndexScan
cte << cte_query
inner_query = if order == :asc
inner_query.where(arel_column.gt(cte.table[column.to_s]))
inner_query.where(arel_column.gt(cte.table[column_alias]))
else
inner_query.where(arel_column.lt(cte.table[column.to_s]))
inner_query.where(arel_column.lt(cte.table[column_alias]))
end
inner_query = inner_query.order(column => order)
.select(column)
inner_query = inner_query
.select(column_for_select)
.order(column_alias => order)
.limit(1)
cte << cte.table
.project(Arel::Nodes::Grouping.new(Arel.sql(inner_query.to_sql)).as(column.to_s))
.project(Arel::Nodes::Grouping.new(Arel.sql(inner_query.to_sql)).as(column_alias))
unscoped do
select(column)
select(column_alias)
.with
.recursive(cte.to_arel)
.from(cte.alias_to(arel_table))
.where(arel_column.not_eq(nil)) # filtering out the last NULL value
.where.not(column_alias => nil) # filtering out the last NULL value
end
end
private
def extract_column_and_alias_and_select(column, arel_table)
case column
when Arel::Nodes::As
[column.left, column.right, column]
when Arel::Attributes::Attribute
[column, column.name, column.name]
else
[arel_table[column.to_s], column.to_s, column.to_s]
end
end
end
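
With this change, `column:` accepts a plain column name, an `Arel::Attributes::Attribute`, or an aliased `Arel::Nodes::As` node. A minimal usage sketch, mirroring the specs later in this commit (the model and alias names are illustrative):

```ruby
# Plain symbol column: selects and orders by :author_id
Issue.loose_index_scan(column: :author_id, order: :asc).pluck(:author_id)

# Arel attribute: behaves like the symbol form
Issue.loose_index_scan(column: Issue.arel_table[:author_id]).pluck(:author_id)

# Aliased Arel column: the alias becomes the selected column name
aliased = Issue.arel_table[:author_id].as('example_alias')
Issue.loose_index_scan(column: aliased).pluck(:example_alias)
```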

View File

@@ -28,7 +28,7 @@ module Groups
handle_changes
handle_namespace_settings
handle_hierarchy_cache_update
group.assign_attributes(params)
group.assign_attributes(params.except(*non_assignable_group_params))
return false if group.errors.present?
@@ -225,6 +225,10 @@
Gitlab::EventStore.publish(event)
end
def non_assignable_group_params
[]
end
end
end
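
The empty `non_assignable_group_params` is a hook point: a subclass (for example, in EE) can override it to keep selected params out of `assign_attributes` and persist them through a dedicated step instead. A hypothetical override sketch (the param name is illustrative, not taken from this commit):

```ruby
# Hypothetical override: params listed here are excluded from
# mass assignment and handled by a dedicated service instead.
def non_assignable_group_params
  [:example_setting_handled_elsewhere]
end
```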

View File

@@ -7,6 +7,10 @@
"gitlab_environment_toolkit_instance": {
"type": "boolean",
"description": "Indicates whether the instance was provisioned with the GitLab Environment Toolkit for Service Ping reporting."
},
"gitlab_product_usage_data_enabled": {
"type": "boolean",
"description": "Indicates whether the instance was provisioned with product usage data tracking."
}
}
}
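
A metadata payload conforming to the extended schema might look like this (values are illustrative):

```json
{
  "gitlab_environment_toolkit_instance": true,
  "gitlab_product_usage_data_enabled": false
}
```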

View File

@@ -62,9 +62,9 @@ The following table lists the GitLab Duo features, and whether they are availabl
| [Root Cause Analysis](../../user/gitlab_duo_chat/examples.md#troubleshoot-failed-cicd-jobs-with-root-cause-analysis) | {{< icon name="check-circle-dashed" >}} Beta | GitLab 17.10 and later |
| [Merge Commit Message Generation](../../user/project/merge_requests/duo_in_merge_requests.md#generate-a-merge-commit-message) | {{< icon name="check-circle-dashed" >}} Beta | GitLab 17.11 and later |
| [Summarize New Merge Request](../../user/project/merge_requests/duo_in_merge_requests.md#generate-a-description-by-summarizing-code-changes) | {{< icon name="check-circle-dashed" >}} Beta | GitLab 17.11 and later |
| [Vulnerability Explanation](../../user/application_security/vulnerabilities/_index.md#explaining-a-vulnerability) | {{< icon name="check-circle-dashed" >}} Beta | GitLab 17.11 and later |
| [Discussion Summary](../../user/discussions/_index.md#summarize-issue-discussions-with-duo-chat) | {{< icon name="dash-circle" >}} No | Not applicable |
| [GitLab Duo for the CLI](../../editor_extensions/gitlab_cli/_index.md#gitlab-duo-for-the-cli) | {{< icon name="dash-circle" >}} No | Not applicable |
| [Vulnerability Explanation](../../user/application_security/vulnerabilities/_index.md#explaining-a-vulnerability) | {{< icon name="dash-circle" >}} No | Not applicable |
| [Vulnerability Resolution](../../user/application_security/vulnerabilities/_index.md#vulnerability-resolution) | {{< icon name="dash-circle" >}} No | Not applicable |
#### Supported Duo Chat features

View File

@@ -8462,6 +8462,7 @@ Input type: `NamespaceDeleteRemoteDevelopmentClusterAgentMappingInput`
| ---- | ---- | ----------- |
| <a id="mutationnamespacedeleteremotedevelopmentclusteragentmappingclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationnamespacedeleteremotedevelopmentclusteragentmappingerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationnamespacedeleteremotedevelopmentclusteragentmappingnamespaceclusteragentmapping"></a>`namespaceClusterAgentMapping` {{< icon name="warning-solid" >}} | [`NamespaceClusterAgentMapping`](#namespaceclusteragentmapping) | **Deprecated:** **Status**: Experiment. Introduced in GitLab 17.11. |
### `Mutation.namespaceSettingsUpdate`
@@ -24802,6 +24803,7 @@ A software dependency used by a project.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="dependencycomponent"></a>`component` | [`Component!`](#component) | Information about the component associated to the dependency. |
| <a id="dependencyid"></a>`id` | [`GlobalID!`](#globalid) | ID of the dependency. |
| <a id="dependencylicenses"></a>`licenses` | [`[License!]`](#license) | Licenses associated to the dependency. |
| <a id="dependencylocation"></a>`location` | [`Location`](#location) | Information about where the dependency is located. |
@@ -41435,6 +41437,7 @@ AI features that can be configured through the Duo self-hosted feature settings.
| <a id="aifeaturescode_generations"></a>`CODE_GENERATIONS` | Code generation feature setting. |
| <a id="aifeaturesduo_chat"></a>`DUO_CHAT` | Duo Chat feature setting. |
| <a id="aifeaturesduo_chat_explain_code"></a>`DUO_CHAT_EXPLAIN_CODE` | Duo chat explain code feature setting. |
| <a id="aifeaturesduo_chat_explain_vulnerability"></a>`DUO_CHAT_EXPLAIN_VULNERABILITY` | Duo chat explain vulnerability feature setting. |
| <a id="aifeaturesduo_chat_fix_code"></a>`DUO_CHAT_FIX_CODE` | Duo chat fix code feature setting. |
| <a id="aifeaturesduo_chat_refactor_code"></a>`DUO_CHAT_REFACTOR_CODE` | Duo chat refactor code feature setting. |
| <a id="aifeaturesduo_chat_troubleshoot_job"></a>`DUO_CHAT_TROUBLESHOOT_JOB` | Duo chat troubleshoot job feature setting. |

View File

@@ -45848,6 +45848,9 @@ definitions:
- default_on
- default_off
- never_on
amazon_q_auto_review_enabled:
type: boolean
description: Enable Amazon Q auto review for merge request
experiment_features_enabled:
type: boolean
description: Enable experiment features for this group
@@ -46032,6 +46035,9 @@ definitions:
- default_on
- default_off
- never_on
amazon_q_auto_review_enabled:
type: boolean
description: Enable Amazon Q auto review for merge request
experiment_features_enabled:
type: boolean
description: Enable experiment features for this group

View File

@@ -128,7 +128,7 @@ By default, this variable is set from the value of `${GITLAB_DEPENDENCY_PROXY}`.
- `GITLAB_DEPENDENCY_PROXY` is a CI/CD variable in the [`gitlab-org`](https://gitlab.com/gitlab-org) and the [`gitlab-com`](https://gitlab.com/gitlab-com) groups. It is defined as `${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/`.
- `GITLAB_DEPENDENCY_PROXY_ADDRESS` is defined in the `gitlab-org/gitlab` project. It defaults to `"${GITLAB_DEPENDENCY_PROXY}"`, but is overridden in some cases (see the workaround section below).
In `gitlab-org/gitlab`, we'll use `GITLAB_DEPENDENCY_PROXY_ADDRESS` [due to a workaround](#work-around-for-when-a-pipeline-is-started-by-a-project-access-token-user). Everywhere else in the `gitlab-org` and `gitlab-com` groups, we should use `GITLAB_DEPENDENCY_PROXY` to use the Dependency Proxy:
In `gitlab-org/gitlab`, we'll use `GITLAB_DEPENDENCY_PROXY_ADDRESS` [due to a workaround](#work-around-for-when-a-pipeline-is-started-by-a-project-access-token-user). Everywhere else in the `gitlab-org` and `gitlab-com` groups, we should use `GITLAB_DEPENDENCY_PROXY` to use the Dependency Proxy. For any other project, you can rely on the `CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX` predefined CI/CD variable to enable the dependency proxy:
```yaml
# In the gitlab-org/gitlab project
@@ -136,6 +136,9 @@ image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}alpine:edge
# In any other project in gitlab-org and gitlab-com groups
image: ${GITLAB_DEPENDENCY_PROXY}alpine:edge
# In projects outside of gitlab-org and gitlab-com groups
image: ${CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX}/alpine:edge
```
Forks that reside on any other personal namespaces or groups fall back to

View File

@@ -48,6 +48,7 @@ To create a new compliance framework from the compliance frameworks report:
1. Select **Secure > Compliance center**.
1. On the page, select the **Frameworks** tab.
1. Select **New framework**.
1. Select **Create blank framework**.
1. Select **Add framework** to create the compliance framework.
## Edit a compliance framework

View File

@@ -110,3 +110,47 @@ To set as default (or remove the default) from [compliance framework report](com
To remove a compliance framework from one or multiple projects in a group, remove the compliance framework through the
[Compliance projects report](compliance_center/compliance_projects_report.md#remove-a-compliance-framework-from-projects-in-a-group).
## Import and export compliance frameworks
{{< history >}}
- [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/16499) in GitLab 17.11.
{{< /history >}}
Download existing compliance frameworks as JSON files and upload new frameworks from JSON templates.
A library of JSON templates is available from the
[Compliance Adherence Templates](https://gitlab.com/gitlab-org/software-supply-chain-security/compliance/engineering/compliance-adherence-templates) project.
Use these templates to quickly adopt predefined compliance frameworks.
### Export a compliance framework as a JSON file
With this feature, you can share and back up compliance frameworks.
To export a compliance framework from the compliance center:
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Secure > Compliance center**.
1. On the page, select the **Frameworks** tab.
1. Locate the compliance framework you wish to export.
1. Select the vertical ellipsis ({{< icon name="ellipsis_v" >}}).
1. Select **Export as JSON file**.
The JSON file is downloaded to your local system.
### Import a compliance framework from a JSON file
With this feature, you can use shared or backed up compliance frameworks.
To import a compliance framework by using a JSON template:
1. On the left sidebar, select **Search or go to** and find your group.
1. Select **Secure > Compliance center**.
1. On the page, select the **Frameworks** tab.
1. Select **New framework**.
1. Select **Import framework**.
1. In the dialog that appears, select the JSON file from your local system.
If the import is successful, the new compliance framework appears in the list. Any errors are displayed for correction.

View File

@@ -144,3 +144,27 @@ ensure it is valid. For example, `Destination-Project-Path` is normalized to `de
You might receive the error `command exited with error code 15 and Unable to save [FILTERED] into [FILTERED]` in logs
when migrating projects by using direct transfer. If you receive this error, you can safely ignore it. GitLab retries
the exited command.
## Error: `Batch export [batch_number] from source instance failed`
On the destination instance, you might encounter the following error:
```plaintext
Batch export [batch_number] from source instance failed: [source instance error]
```
This error occurs when the source instance fails to export some records.
The most common reasons are:
- Insufficient disk space
- Multiple interruptions of Sidekiq jobs due to insufficient memory
- Database statement timeout
To resolve this issue:
1. Identify and fix the problem on the source instance.
1. Delete the partially imported project or group from the destination instance and initiate a new import.
For more information about the relations and batches that failed to export,
use the export status API endpoints for [projects](../../../api/project_relations_export.md#export-status)
and [groups](../../../api/group_relations_export.md#export-status) on the source instance.
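
For example, to inspect which relations failed for a project on the source instance (host, project ID, and token are placeholders):

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://source.example.com/api/v4/projects/<project_id>/export_relations/status"
```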

View File

@@ -15,7 +15,12 @@ title: Import project from repository by URL
You can import your existing repositories by providing the Git URL. You can't import GitLab issues and merge requests
this way. Other import methods are more complete.
If the repository is too large, the import can timeout.
If the repository is too large, the import might time out.
You can import your Git repository by:
- [Using the UI](#import-a-project-by-using-the-ui)
- [Using the API](#import-a-project-by-using-the-api)
## Prerequisites
@@ -32,7 +37,7 @@ If the repository is too large, the import can timeout.
- If importing a private repository, an access token for authenticated access to the source repository might be required
instead of a password.
## Import project by URL
## Import a project by using the UI
1. On the left sidebar, at the top, select **Create new** ({{< icon name="plus" >}}) and **New project/repository**.
1. Select **Import project**.
@@ -43,3 +48,46 @@ If the repository is too large, the import can timeout.
1. Select **Create project**.
Your newly created project is displayed.
### Import a timed-out project
Imports of large repositories might time out after three hours.
To import a timed-out project:
1. Clone the repository.
```shell
git clone --mirror https://example.com/group/project.git
```
The `--mirror` option ensures all branches, tags, and refs are copied.
1. Add the new remote repository.
```shell
cd project.git
git remote add new-origin https://gitlab.com/group/project.git
```
1. Push everything to the new remote repository.
```shell
git push --mirror new-origin
```
## Import a project by using the API
You can use the [Projects API](../../../api/projects.md#create-a-project) to import a Git repository:
```shell
curl --location "https://gitlab.example.com/api/v4/projects/" \
--header 'Content-Type: application/json' \
--header 'Authorization: Bearer <your-token>' \
--data-raw '{
"description": "New project description",
"path": "new_project_path",
"import_url": "https://username:password@example.com/group/project.git"
}'
```
Some providers do not allow a password and instead require an access token.

View File

@@ -29,7 +29,7 @@ module ActiveContext
class_methods do
# @abstract Implement .transform in subclass to handle query transformation
def transform(_node)
def transform(_collection, _node)
raise NotImplementedError, "#{name} must implement .transform"
end
end
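
Concrete processors must now accept the collection alongside the query node; a minimal conforming sketch (the class name and placeholder body are illustrative):

```ruby
# Hypothetical adapter processor satisfying the updated contract
class ExampleProcessor
  include ActiveContext::Databases::Concerns::Processor

  def self.transform(collection, node)
    # `collection` is available here for model or index lookup
    new.process(node)
  end

  def process(node)
    { query: node } # placeholder transformation
  end
end
```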

View File

@@ -18,7 +18,7 @@ module ActiveContext
def search(collection:, query:)
raise ArgumentError, "Expected Query object, you used #{query.class}" unless query.is_a?(ActiveContext::Query)
es_query = Processor.transform(query)
es_query = Processor.transform(collection, query)
res = client.search(index: collection, body: es_query)
QueryResult.new(res)
end

View File

@@ -12,7 +12,7 @@ module ActiveContext
# @return [Hash] The Elasticsearch query DSL
# @example
# Processor.transform(ActiveContext::Query.filter(status: 'active'))
def self.transform(node)
def self.transform(_collection, node)
new.process(node)
end

View File

@@ -21,7 +21,7 @@ module ActiveContext
def search(collection:, query:)
raise ArgumentError, "Expected Query object, you used #{query.class}" unless query.is_a?(ActiveContext::Query)
es_query = Processor.transform(query)
es_query = Processor.transform(collection, query)
res = client.search(index: collection, body: es_query)
QueryResult.new(res)
end

View File

@@ -12,7 +12,7 @@ module ActiveContext
# @return [Hash] The Opensearch query DSL
# @example
# Processor.transform(ActiveContext::Query.filter(status: 'active'))
def self.transform(node)
def self.transform(_collection, node)
new.process(node)
end

View File

@@ -21,11 +21,14 @@ module ActiveContext
setup_connection_pool
end
def search(_query)
with_connection do |conn|
res = conn.execute('SELECT * FROM pg_stat_activity')
QueryResult.new(res)
def search(collection:, query:)
raise ArgumentError, "Expected Query object, you used #{query.class}" unless query.is_a?(ActiveContext::Query)
sql = Processor.transform(collection, query)
res = with_connection do |conn|
conn.execute(sql)
end
QueryResult.new(res)
end
def bulk_process(operations)

View File

@@ -0,0 +1,118 @@
# frozen_string_literal: true
module ActiveContext
module Databases
module Postgresql
class Processor
include ActiveContext::Databases::Concerns::Processor
# Transforms a query node into a PostgreSQL query using ActiveRecord
def self.transform(collection, node)
ActiveContext.adapter.client.with_model_for(collection) do |model|
relation = new(model).process(node)
relation.to_sql
end
end
def initialize(model)
@model = model
@base_relation = model.all
end
# Processes a query node and returns the corresponding ActiveRecord relation
def process(node)
case node.type
when :filter then process_filter(node.value)
when :prefix then process_prefix(node.value)
when :and then process_and(node.children)
when :or then process_or(node.children)
when :knn then process_knn(node)
when :limit then process_limit(node)
else
raise ArgumentError, "Unsupported node type: #{node.type}"
end
end
private
attr_reader :model, :base_relation
def process_filter(conditions)
relation = base_relation
conditions.each do |key, value|
relation = relation.where(key => value)
end
relation
end
def process_prefix(conditions)
relation = base_relation
conditions.each do |key, value|
relation = relation.where("#{model.connection.quote_column_name(key)} LIKE ?", "#{value}%")
end
relation
end
def process_and(children)
relation = base_relation
children.each do |child|
relation = relation.merge(process(child))
end
relation
end
def process_or(children)
if contains_knn?(children)
process_or_with_knn(children)
else
process_simple_or(children)
end
end
def contains_knn?(children)
children.any? { |child| child.type == :knn }
end
def process_or_with_knn(children)
knn_children, non_knn_children = children.partition { |child| child.type == :knn }
relation = non_knn_children.empty? ? base_relation : process_simple_or(non_knn_children)
process_knn(knn_children.first, relation)
end
def process_simple_or(children)
# Start with the first child as the base relation: WHERE X
relation = process(children.first)
# OR with each subsequent child
children[1..].each do |child|
relation = relation.or(process(child))
end
relation
end
def process_knn(node, relation = base_relation)
# Start with base relation or filtered relation if there are children
relation = node.children.any? ? process(node.children.first) : relation
column = node.value[:target]
vector = node.value[:vector]
limit = node.value[:limit]
vector_str = "[#{vector.join(',')}]"
relation
.order(Arel.sql("#{model.connection.quote_column_name(column)} <=> #{model.connection.quote(vector_str)}"))
.limit(limit)
end
def process_limit(node)
child_relation = process(node.children.first)
# Create a subquery
subquery = child_relation.arel.as('subq')
model.unscoped.select('subq.*').from(subquery).limit(node.value)
end
end
end
end
end
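
A usage sketch for the new processor, tied to the specs later in this commit (the `items` collection and column names come from those specs):

```ruby
query = ActiveContext::Query
  .filter(status: 'active')
  .knn(target: 'embedding', vector: [0.1, 0.2], limit: 5)

ActiveContext::Databases::Postgresql::Processor.transform('items', query)
# => SELECT "items".* FROM "items" WHERE "items"."status" = 'active'
#    ORDER BY "embedding" <=> '[0.1,0.2]' LIMIT 5
```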

View File

@@ -1,107 +0,0 @@
# frozen_string_literal: true
module ActiveContext
class Query
# WARNING: This is a toy example processor that is NOT safe for production use.
# It is vulnerable to SQL injection attacks because it directly interpolates user input into SQL strings.
# For a production-safe implementation, please use proper SQL parameter binding
# (see ActiveRecord::Sanitization, PG::Connection#exec_params, etc.)
#
# Examples of vulnerabilities:
# - Direct string interpolation of values (e.g., "#{k} = '#{v}'")
# - Unquoted identifiers (column names)
# - Direct interpolation of arrays and limits
class ProcessorExample
include ActiveContext::Databases::Concerns::Processor
def self.transform(node)
new.process(node)
end
def process(node)
case node.type
when :filter then process_filter(node.value)
when :prefix then process_prefix(node.value)
when :or then process_or(node)
when :and then process_and(node.children)
when :knn then process_knn(node)
when :limit then process_limit(node)
else
raise "Unknown node type: #{node.type}"
end
end
private
def process_filter(conditions)
conditions.map { |k, v| "#{k} = '#{v}'" }.join(" AND ")
end
def process_prefix(conditions)
conditions.map { |k, v| "#{k} LIKE '#{v}%'" }.join(" AND ")
end
def process_or(node)
if contains_knn?(node)
process_or_with_knn(node)
else
process_simple_or(node.children)
end
end
def process_simple_or(children)
children.map { |child| "(#{process(child)})" }.join(" OR ")
end
def process_or_with_knn(node)
knn_child = find_knn_child(node)
conditions = build_or_conditions(node, knn_child)
build_knn_query(knn_child, conditions)
end
def process_and(children)
children.map { |child| "(#{process(child)})" }.join(" AND ")
end
def process_knn(node)
conditions = node.children.any? ? "WHERE #{process(node.children.first)}" : ""
build_knn_query(node, conditions)
end
def process_limit(node)
"SELECT * FROM (#{process(node.children.first)}) subq LIMIT #{node.value}"
end
def contains_knn?(node)
node.children.any? { |child| child.type == :knn }
end
def find_knn_child(node)
node.children.find { |child| child.type == :knn }
end
def build_or_conditions(node, knn_child)
conditions = node.children.filter_map do |child|
next if child == knn_child
"(#{process(child)})"
end.join(" OR ")
conditions.empty? ? "" : "WHERE #{conditions}"
end
def build_knn_query(node, conditions)
target = node.value[:target]
vector = node.value[:vector]
limit = node.value[:limit]
[
"SELECT * FROM items",
conditions,
"ORDER BY #{target} <-> '[#{vector.join(',')}]'",
"LIMIT #{limit}"
].reject(&:empty?).join(" ")
end
end
end
end

View File

@@ -231,6 +231,9 @@ RSpec.describe ActiveContext::Databases::Postgresql::Client do
allow(raw_connection).to receive(:server_version).and_return(120000)
allow(ActiveContext::Databases::Postgresql::QueryResult).to receive(:new)
allow(ActiveContext::Databases::Postgresql::Processor).to receive(:transform)
.and_return('SELECT * FROM pg_stat_activity')
end
it 'executes query and returns QueryResult' do
@@ -238,7 +241,7 @@ RSpec.describe ActiveContext::Databases::Postgresql::Client do
expect(ActiveContext::Databases::Postgresql::QueryResult)
.to receive(:new).with(query_result)
client.search('test query')
client.search(collection: 'test', query: ActiveContext::Query.filter(project_id: 1))
end
end

View File

@@ -0,0 +1,185 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ActiveContext::Databases::Postgresql::Processor, feature_category: :global_search do
let(:collection) { 'items' }
let(:client) { instance_double(ActiveContext::Databases::Postgresql::Client) }
let(:adapter) { instance_double(ActiveContext::Databases::Postgresql::Adapter, client: client) }
let(:connection) { double(quote: double, quote_column_name: double) }
let(:relation) { double.as_null_object }
let(:model_class) { double(connection: connection, all: relation, unscoped: relation) }
before do
allow(client).to receive(:with_model_for).with(collection).and_yield(model_class)
allow(ActiveContext).to receive(:adapter).and_return(adapter)
end
shared_examples 'a SQL transformer' do |query, expected_sql|
it 'generates the expected SQL' do
allow(relation).to receive(:to_sql).and_return(expected_sql)
result = described_class.transform(collection, query)
expect(result).to eq(expected_sql)
end
end
context 'with filter queries' do
it_behaves_like 'a SQL transformer',
ActiveContext::Query.filter(status: 'active', project_id: 123),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active' AND \"items\".\"project_id\" = 123"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.filter(project_id: [1, 2, 3]),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"project_id\" IN (1, 2, 3)"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.filter(status: 'active', project_id: [1, 2, 3], category: 'product'),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active' " \
"AND \"items\".\"project_id\" IN (1, 2, 3) AND \"items\".\"category\" = 'product'"
end
context 'with prefix queries' do
it_behaves_like 'a SQL transformer',
ActiveContext::Query.prefix(name: 'test', path: 'foo/'),
"SELECT \"items\".* FROM \"items\" WHERE (\"name\" LIKE 'test%') AND (\"path\" LIKE 'foo/%')"
end
context 'with AND queries' do
it_behaves_like 'a SQL transformer',
ActiveContext::Query.and(
ActiveContext::Query.filter(status: %w[active pending]),
ActiveContext::Query.filter(category: 'product')
),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" IN ('active', 'pending') " \
"AND \"items\".\"category\" = 'product'"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.and(
ActiveContext::Query.filter(status: 'active'),
ActiveContext::Query.prefix(name: 'test')
),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active' AND (\"name\" LIKE 'test%')"
context 'when containing KNN' do
it_behaves_like 'a SQL transformer',
ActiveContext::Query.and(
ActiveContext::Query.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
),
ActiveContext::Query.filter(status: 'active')
),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active' " \
"ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5"
end
end
context 'with OR queries' do
it_behaves_like 'a SQL transformer',
ActiveContext::Query.or(
ActiveContext::Query.filter(project_id: [1, 2, 3]),
ActiveContext::Query.filter(status: 'active')
),
"SELECT \"items\".* FROM \"items\" WHERE (\"items\".\"project_id\" IN (1, 2, 3) " \
"OR \"items\".\"status\" = 'active')"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.or(
ActiveContext::Query.filter(status: 'active'),
ActiveContext::Query.prefix(name: 'test')
),
"SELECT \"items\".* FROM \"items\" WHERE (\"items\".\"status\" = 'active' OR \"name\" LIKE 'test%')"
context 'when containing KNN' do
it_behaves_like 'a SQL transformer',
ActiveContext::Query.or(
ActiveContext::Query.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
)
),
"SELECT \"items\".* FROM \"items\" ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.or(
ActiveContext::Query.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
),
ActiveContext::Query.filter(status: 'active')
),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active' " \
"ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5"
end
end
context 'with KNN queries' do
it_behaves_like 'a SQL transformer',
ActiveContext::Query.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
),
"SELECT \"items\".* FROM \"items\" ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.filter(status: 'active').knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active' " \
"ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.filter(project_id: [1, 2, 3]).knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"project_id\" IN (1, 2, 3) " \
"ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.and(
ActiveContext::Query.filter(status: 'active'),
ActiveContext::Query.filter(category: 'product')
).knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active' " \
"AND \"items\".\"category\" = 'product' ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.and(
ActiveContext::Query.filter(status: 'active'),
ActiveContext::Query.prefix(name: 'test')
).knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
),
"SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active' AND (\"name\" LIKE 'test%') " \
"ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5"
end
context 'with limit queries' do
it_behaves_like 'a SQL transformer',
ActiveContext::Query.filter(status: 'active').limit(10),
"SELECT subq.* FROM (SELECT \"items\".* FROM \"items\" WHERE \"items\".\"status\" = 'active') subq LIMIT 10"
it_behaves_like 'a SQL transformer',
ActiveContext::Query.knn(
target: 'embedding',
vector: [0.1, 0.2],
limit: 5
).limit(10),
"SELECT subq.* FROM (SELECT \"items\".* FROM \"items\" " \
"ORDER BY \"embedding\" <=> '[0.1,0.2]' LIMIT 5) subq LIMIT 10"
end
end

View File

@@ -9,7 +9,7 @@ RSpec.shared_examples 'a query processor' do
expect(described_class).to receive(:new).and_return(processor)
expect(processor).to receive(:process).with(query)
described_class.transform(query)
described_class.transform(double, query)
end
end

View File

@@ -83,14 +83,38 @@ module Gitlab
tagging_models.each do |tagging_model|
tagging_model.include EachBatch
delete_duplicate_taggings(tagging_model, tag_remap)
bad_tag_ids.each do |bad_tag_id|
tagging_model.where(tag_id: bad_tag_id).each_batch(of: TAGGING_BATCH_SIZE) do |batch|
batch.update_all(tag_id: tag_remap.fetch(bad_tag_id)) unless dry_run
end
logger.info(
"Updated tag_id #{bad_tag_id} on #{tagging_model.table_name} records to #{tag_remap.fetch(bad_tag_id)}"
)
logger.info(
"Updated tag_id #{bad_tag_id} on #{tagging_model.table_name} records to #{tag_remap.fetch(bad_tag_id)}"
)
end
end
end
end
def taggings_with_fk(model_record)
case model_record
when ::Ci::BuildTag
model_record.class.where(build_id: model_record.build_id)
when ::Ci::RunnerTagging
model_record.class.where(runner_id: model_record.runner_id)
end
end
def delete_duplicate_taggings(tagging_model, tag_remap)
tagging_model.where(tag_id: tag_remap.keys).each_batch(of: TAGGING_BATCH_SIZE) do |batch|
batch.each do |bad_tag_id_row|
existing_tag_id = tag_remap.fetch(bad_tag_id_row.tag_id)
next unless taggings_with_fk(bad_tag_id_row).where(tag_id: existing_tag_id).exists?
next if dry_run
taggings_with_fk(bad_tag_id_row).where(tag_id: bad_tag_id_row.tag_id).delete_all
end
end
end

View File

@@ -6589,15 +6589,15 @@ msgstr ""
msgid "An %{link_start}alert%{link_end} with the same fingerprint is already open. To change the status of this alert, resolve the linked alert."
msgstr ""
msgid "An Administrator has set the maximum expiration date to %{maxDate}. %{helpLinkStart}Learn more%{helpLinkEnd}."
msgstr ""
msgid "An administrator added this OAuth application "
msgstr ""
msgid "An administrator changed the password for your GitLab account on %{link_to}."
msgstr ""
msgid "An administrator has set the maximum expiration date to %{maxDate}. %{helpLinkStart}Learn more%{helpLinkEnd}."
msgstr ""
msgid "An alert has been resolved in %{project_path}."
msgstr ""
@@ -12966,6 +12966,9 @@ msgstr ""
msgid "Clear templates search input"
msgstr ""
msgid "Clear the date to create access tokens without expiration."
msgstr ""
msgid "Clear this checkbox to use a personal access token instead."
msgstr ""

View File

@@ -65,7 +65,7 @@
"@gitlab/fonts": "^1.3.0",
"@gitlab/query-language-rust": "0.5.2",
"@gitlab/svgs": "3.126.0",
"@gitlab/ui": "111.9.1",
"@gitlab/ui": "111.10.0",
"@gitlab/vue-router-vue3": "npm:vue-router@4.5.0",
"@gitlab/vuex-vue3": "npm:vuex@4.1.0",
"@gitlab/web-ide": "^0.0.1-dev-20250320115735",

View File

@@ -22,7 +22,7 @@ gem 'parallel', '~> 1.26', '>= 1.26.3'
gem 'rainbow', '~> 3.1.1'
gem 'rspec-parameterized', '~> 1.0.2'
gem 'octokit', '~> 9.2.0', require: false
gem "faraday-retry", "~> 2.2", ">= 2.2.1"
gem "faraday-retry", "~> 2.3"
gem 'zeitwerk', '~> 2.7', '>= 2.7.2'
gem 'influxdb-client', '~> 3.2'
gem 'terminal-table', '~> 3.0.2', require: false

View File

@@ -84,7 +84,7 @@ GEM
faraday-net_http (>= 2.0, < 3.2)
faraday-net_http (3.1.0)
net-http
faraday-retry (2.2.1)
faraday-retry (2.3.0)
faraday (~> 2.0)
ffi (1.17.0)
ffi-compiler (1.0.1)
@@ -363,7 +363,7 @@ DEPENDENCIES
deprecation_toolkit (~> 2.2.2)
factory_bot (~> 6.5.1)
faker (~> 3.5, >= 3.5.1)
faraday-retry (~> 2.2, >= 2.2.1)
faraday-retry (~> 2.3)
fog-core (= 2.1.0)
fog-google (~> 1.24, >= 1.24.1)
gitlab-orchestrator!

View File

@@ -45,18 +45,24 @@ module QA
end
end
context 'when the file is a text file',
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/390006' do
let(:file_name) { 'text_file.txt' }
context 'with a new file', quarantine: {
only: { pipeline: %i[staging staging-canary] },
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/527274',
type: :flaky
} do
context 'when the file is a text file',
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/390006' do
let(:file_name) { 'text_file.txt' }
it_behaves_like 'upload a file'
end
it_behaves_like 'upload a file'
end
context 'when the file is an image',
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/390007' do
let(:file_name) { 'dk.png' }
context 'when the file is an image',
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/390007' do
let(:file_name) { 'dk.png' }
it_behaves_like 'upload a file'
it_behaves_like 'upload a file'
end
end
end
end

View File

@@ -602,7 +602,9 @@ tests = [
expected: [
'ee/spec/graphql/types/remote_development/namespace_cluster_agent_mapping_type_spec.rb',
'ee/spec/requests/api/graphql/mutations/remote_development/' \
'namespace_cluster_agent_mapping_operations/create_spec.rb'
'namespace_cluster_agent_mapping_operations/create_spec.rb',
'ee/spec/requests/api/graphql/mutations/remote_development/' \
'namespace_cluster_agent_mapping_operations/delete_spec.rb'
]
},
## END Remote development GraphQL types

View File

@@ -3,7 +3,7 @@ import VueApollo from 'vue-apollo';
import { shallowMount } from '@vue/test-utils';
import Participants from '~/sidebar/components/participants/participants.vue';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
import WorkItemDates from '~/work_items/components/work_item_dates.vue';
import WorkItemDates from 'ee_else_ce/work_items/components/work_item_dates.vue';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
import WorkItemParent from '~/work_items/components/work_item_parent.vue';

View File

@@ -1,46 +1,37 @@
import { GlDatepicker, GlFormRadio } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import WorkItemDates from '~/work_items/components/work_item_dates.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking } from 'helpers/tracking_helper';
import { stubComponent } from 'helpers/stub_component';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { Mousetrap } from '~/lib/mousetrap';
import { newDate } from '~/lib/utils/datetime/date_calculation_utility';
import WorkItemDates from '~/work_items/components/work_item_dates.vue';
import WorkItemSidebarWidget from '~/work_items/components/shared/work_item_sidebar_widget.vue';
import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import {
updateWorkItemMutationErrorResponse,
updateWorkItemMutationResponse,
} from 'jest/work_items/mock_data';
import WorkItemSidebarWidget from '~/work_items/components/shared/work_item_sidebar_widget.vue';
import { updateWorkItemMutationErrorResponse, updateWorkItemMutationResponse } from '../mock_data';
Vue.use(VueApollo);
describe('WorkItemDates component', () => {
describe('WorkItemDueDate component', () => {
let wrapper;
const startDateShowSpy = jest.fn();
const workItemId = 'gid://gitlab/WorkItem/1';
const updateWorkItemMutationHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
const findWorkItemSidebarWidget = () => wrapper.findComponent(WorkItemSidebarWidget);
const findStartDatePicker = () => wrapper.findByTestId('start-date-picker');
const findDueDatePicker = () => wrapper.findByTestId('due-date-picker');
const findApplyButton = () => wrapper.findByTestId('apply-button');
const findEditButton = () => wrapper.findByTestId('edit-button');
const findStartDateValue = () => wrapper.findByTestId('start-date-value');
const findDueDateValue = () => wrapper.findByTestId('due-date-value');
const findFixedRadioButton = () => wrapper.findAllComponents(GlFormRadio).at(0);
const findInheritedRadioButton = () => wrapper.findAllComponents(GlFormRadio).at(1);
const createComponent = ({
canUpdate = false,
dueDate = null,
startDate = null,
isFixed = false,
shouldRollUp = true,
mutationHandler = updateWorkItemMutationHandler,
} = {}) => {
wrapper = shallowMountExtended(WorkItemDates, {
@@ -49,19 +40,10 @@ describe('WorkItemDates component', () => {
canUpdate,
dueDate,
startDate,
isFixed,
shouldRollUp,
workItemType: 'Epic',
workItemType: 'Task',
workItem: updateWorkItemMutationResponse.data.workItemUpdate.workItem,
fullPath: 'gitlab-org/gitlab',
},
stubs: {
GlDatepicker: stubComponent(GlDatepicker, {
methods: {
show: startDateShowSpy,
},
}),
GlFormRadio,
WorkItemSidebarWidget,
},
});
@@ -76,7 +58,7 @@ describe('WorkItemDates component', () => {
expect(findStartDateValue().classes('gl-text-subtle')).toBe(false);
});
it('renders `None` when it is not passed to the component`', () => {
it('renders `None` when it is not passed to the component`', () => {
createComponent();
expect(findStartDateValue().text()).toBe('None');
@@ -107,96 +89,10 @@ describe('WorkItemDates component', () => {
expect(findDueDatePicker().exists()).toBe(false);
});
describe('when both start and due date are fixed', () => {
it('checks "fixed" radio button', async () => {
createComponent({ isFixed: true });
await nextTick();
expect(findFixedRadioButton().props('checked')).toBe('fixed');
});
});
describe('when both start and due date are inherited', () => {
it('checks "inherited" radio button', async () => {
createComponent({ isFixed: false });
await nextTick();
expect(findInheritedRadioButton().props('checked')).toBe('inherited');
});
});
});
describe('rollupType updates', () => {
describe('when isFixed prop changes', () => {
it('updates rollupType from inherited to fixed', async () => {
createComponent({ isFixed: false });
await nextTick();
expect(findInheritedRadioButton().props('checked')).toBe('inherited');
await wrapper.setProps({ isFixed: true });
expect(findFixedRadioButton().props('checked')).toBe('fixed');
});
it('updates rollupType from fixed to inherited', async () => {
createComponent({ isFixed: true });
await nextTick();
expect(findFixedRadioButton().props('checked')).toBe('fixed');
await wrapper.setProps({ isFixed: false });
expect(findInheritedRadioButton().props('checked')).toBe('inherited');
});
});
});
describe.each`
radioType | findRadioButton | isFixed
${'fixed'} | ${findFixedRadioButton} | ${true}
${'inherited'} | ${findInheritedRadioButton} | ${false}
`('$radioType radio button', ({ radioType, findRadioButton, isFixed }) => {
it('renders as enabled when user can update work item', () => {
it('passes edit permission to WorkItemSidebarWidget', () => {
createComponent({ canUpdate: true });
expect(findRadioButton().attributes('disabled')).toBeUndefined();
});
it('renders as disabled when user cannot update work item', () => {
createComponent();
expect(findRadioButton().attributes().disabled).toBe('true');
});
describe('when clicked', () => {
let trackingSpy;
beforeEach(async () => {
trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
createComponent({ canUpdate: true, isFixed });
findRadioButton().vm.$emit('change');
await nextTick();
});
it(`calls mutation to update rollup type to ${radioType}`, () => {
expect(updateWorkItemMutationHandler).toHaveBeenCalledWith({
input: {
id: workItemId,
startAndDueDateWidget: { isFixed },
},
});
});
it('tracks updating the rollup type', () => {
expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_rollup_type', {
category: TRACKING_CATEGORY_SHOW,
label: 'item_rolledup_dates',
property: 'type_Epic',
});
});
expect(findWorkItemSidebarWidget().props('canUpdate')).toBe(true);
});
});
@@ -250,6 +146,38 @@ describe('WorkItemDates component', () => {
});
});
describe('when escape key is pressed', () => {
beforeEach(async () => {
createComponent({
canUpdate: true,
dueDate: '2022-12-31',
startDate: '2022-12-31',
});
findEditButton().vm.$emit('click');
await nextTick();
findStartDatePicker().vm.$emit('input', new Date('2022-01-01T00:00:00.000Z'));
});
it('widget is closed and dates are updated, when date picker is focused', async () => {
findStartDatePicker().trigger('keydown.esc');
await nextTick();
expect(updateWorkItemMutationHandler).toHaveBeenCalled();
expect(findStartDatePicker().exists()).toBe(false);
});
it('widget is closed and dates are updated, when date picker is not focused', async () => {
findStartDatePicker().trigger('blur');
Mousetrap.trigger('esc');
await nextTick();
expect(updateWorkItemMutationHandler).toHaveBeenCalled();
expect(findStartDatePicker().exists()).toBe(false);
});
});
describe('when updating date', () => {
describe('when dates are changed', () => {
let trackingSpy;
@@ -279,17 +207,25 @@ describe('WorkItemDates component', () => {
startAndDueDateWidget: {
dueDate: '2022-12-31',
startDate: '2022-01-01',
isFixed: true,
},
},
});
});
it('edit button is disabled when mutation is in flight', () => {
expect(findEditButton().props('disabled')).toBe(true);
});
it('edit button is enabled after mutation is resolved', async () => {
await waitForPromises();
expect(findEditButton().props('disabled')).toBe(false);
});
it('tracks updating the dates', () => {
expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_dates', {
category: TRACKING_CATEGORY_SHOW,
label: 'item_rolledup_dates',
property: 'type_Epic',
label: 'item_dates',
property: 'type_Task',
});
});
});
@@ -343,33 +279,10 @@ describe('WorkItemDates component', () => {
it('emits an error', () => {
expect(wrapper.emitted('error')).toEqual([
['Something went wrong while updating the epic. Please try again.'],
['Something went wrong while updating the task. Please try again.'],
]);
});
});
});
describe('when escape key is pressed', () => {
beforeEach(async () => {
createComponent({
canUpdate: true,
dueDate: '2022-12-31',
startDate: '2022-12-31',
});
findEditButton().vm.$emit('click');
await nextTick();
findStartDatePicker().vm.$emit('input', new Date('2022-01-01T00:00:00.000Z'));
});
it('widget is closed and dates are updated, when date picker is focused', async () => {
findStartDatePicker().trigger('keydown.esc');
await nextTick();
expect(updateWorkItemMutationHandler).toHaveBeenCalled();
expect(findStartDatePicker().exists()).toBe(false);
});
});
});
});

View File

@@ -39,6 +39,12 @@ RSpec.describe Gitlab::Database::DeduplicateCiTags, :aggregate_failures, feature
SQL
end
let(:tagging_ids) do
[
ci_build_tagging_ids, ci_runner_tagging_ids
]
end
describe '#execute' do
subject(:execute) { service.execute }
@@ -69,7 +75,21 @@ RSpec.describe Gitlab::Database::DeduplicateCiTags, :aggregate_failures, feature
let(:build2_id) { create_build }
let(:pending_build2_id) { create_pending_build(build2_id, [duplicate_tag_ids.second, tag_ids.third]) }
let!(:duplicate_ci_build_tagging_id) do
let!(:ci_build2_tagging1_id) do
connection.select_value(<<~SQL)
INSERT INTO p_ci_build_tags (build_id, tag_id, partition_id, project_id)
VALUES (#{build2_id}, #{tag_ids.second}, #{partition_id}, #{project_id}) RETURNING id;
SQL
end
let!(:duplicate_ci_build2_tagging2_id) do
connection.select_value(<<~SQL)
INSERT INTO p_ci_build_tags (build_id, tag_id, partition_id, project_id)
VALUES (#{build2_id}, #{duplicate_tag_ids.second}, #{partition_id}, #{project_id}) RETURNING id;
SQL
end
let!(:duplicate_ci_build2_tagging3_id) do
connection.select_value(<<~SQL)
INSERT INTO p_ci_build_tags (build_id, tag_id, partition_id, project_id)
VALUES (#{build2_id}, #{duplicate_tag_ids.second}, #{partition_id}, #{project_id}) RETURNING id;
@@ -85,16 +105,19 @@ RSpec.describe Gitlab::Database::DeduplicateCiTags, :aggregate_failures, feature
let(:duplicate_tagging_ids) do
[
duplicate_ci_build_tagging_id, duplicate_ci_runner_tagging_id, pending_build2_id
ci_build2_tagging1_id, duplicate_ci_build2_tagging2_id, duplicate_ci_runner_tagging_id,
pending_build2_id
]
end
around do |example|
connection.transaction do
tag_ids
tagging_ids
# allow a scenario where multiple tags with the same name coexist
connection.execute('DROP INDEX index_tags_on_name')
# allow a scenario where duplicate rows for the same build and tag id coexist
connection.execute('DROP INDEX index_p_ci_build_tags_on_tag_id_and_build_id_and_partition_id')
duplicate_tagging_ids
@@ -107,9 +130,14 @@ RSpec.describe Gitlab::Database::DeduplicateCiTags, :aggregate_failures, feature
.to change { table_count('tags') }.by(-2)
.and not_change { ci_runner_tagging_relationship_for(ci_runner_tagging_ids.second) }
.and not_change { ci_pending_build_tag_ids_for(pending_build1_id) }
.and change { ci_build_tagging_relationship_for(duplicate_ci_build_tagging_id) }
.and not_change { ci_build_tagging_relationship_for(ci_build2_tagging1_id) }
.from(build2_id => tag_ids.second)
.and change { ci_build_tagging_relationship_for(duplicate_ci_build2_tagging2_id) }
.from(build2_id => duplicate_tag_ids.second)
.to(build2_id => tag_ids.second)
.to({})
.and change { ci_build_tagging_relationship_for(duplicate_ci_build2_tagging3_id) }
.from(build2_id => duplicate_tag_ids.second)
.to({})
.and change { ci_pending_build_tag_ids_for(pending_build2_id) }
.from([duplicate_tag_ids.second, tag_ids.third])
.to([tag_ids.second, tag_ids.third])
@@ -132,7 +160,7 @@ RSpec.describe Gitlab::Database::DeduplicateCiTags, :aggregate_failures, feature
.and not_change { ci_runner_tagging_relationship_for(ci_runner_tagging_ids.second) }
.and not_change { ci_runner_tagging_relationship_for(ci_runner_tagging_ids.third) }
.and not_change { ci_pending_build_tag_ids_for(pending_build2_id) }
.and not_change { ci_build_tagging_relationship_for(duplicate_ci_build_tagging_id) }
.and not_change { ci_build_tagging_relationship_for(duplicate_ci_build2_tagging2_id) }
.and not_change { ci_runner_tagging_relationship_for(duplicate_ci_runner_tagging_id) }
# Index wasn't recreated because we're in dry run mode

View File

@@ -1,5 +1,4 @@
# frozen_string_literal: true
# frozen_string_literal: true
require 'spec_helper'
@@ -23,7 +22,11 @@ RSpec.describe LooseIndexScan, type: :model do
let_it_be(:issue_5) { create(:issue, author: user_3) }
context 'loading distinct author_ids' do
subject(:author_ids) { issue_model.loose_index_scan(column: :author_id, order: order).pluck(:author_id) }
subject(:author_ids) do
issue_model
.loose_index_scan(column: issue_model.arel_table[:author_id].as("example_alias"), order: order)
.pluck(:example_alias)
end
shared_examples 'assert distinct values example' do
it 'loads the distinct values in the correct order' do
@@ -54,4 +57,78 @@
end
end
end
context 'using Arel column objects' do
subject(:author_ids) do
issue_model.loose_index_scan(column: issue_model.arel_table[:author_id], order: order).pluck(:author_id)
end
context 'when using ascending order' do
let(:order) { :asc }
let(:expected_order) { [user_1.id, user_2.id, user_3.id] }
it 'loads the distinct values in the correct order' do
expect(author_ids).to eq(expected_order)
end
end
context 'when using descending order' do
let(:order) { :desc }
let(:expected_order) { [user_3.id, user_2.id, user_1.id] }
it 'loads the distinct values in the correct order' do
expect(author_ids).to eq(expected_order)
end
end
end
context 'loading distinct values from a different column' do
# Create completely separate test data for this context
let_it_be(:project_context_user_1) { create(:user) }
let_it_be(:project_context_user_2) { create(:user) }
let_it_be(:project_context_user_3) { create(:user) }
let_it_be(:project_1) { create(:project) }
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
# Create issues with projects already assigned
let_it_be(:project_issue_1) { create(:issue, project: project_1, author: project_context_user_2) }
let_it_be(:project_issue_2) { create(:issue, project: project_2, author: project_context_user_1) }
let_it_be(:project_issue_3) { create(:issue, project: project_2, author: project_context_user_1) }
let_it_be(:project_issue_4) { create(:issue, project: project_1, author: project_context_user_2) }
let_it_be(:project_issue_5) { create(:issue, project: project_3, author: project_context_user_3) }
# Only use the issues created specifically for this test
let(:project_test_scope) do
issue_ids = [
project_issue_1.id,
project_issue_2.id,
project_issue_3.id,
project_issue_4.id,
project_issue_5.id
]
issue_model.where(id: issue_ids)
end
subject(:project_ids) { project_test_scope.loose_index_scan(column: :project_id, order: order).pluck(:project_id) }
context 'when using ascending order' do
let(:order) { :asc }
let(:expected_order) { [project_1.id, project_2.id, project_3.id] }
it 'loads the distinct values in the correct order' do
expect(project_ids).to eq(expected_order)
end
end
context 'when using descending order' do
let(:order) { :desc }
let(:expected_order) { [project_3.id, project_2.id, project_1.id] }
it 'loads the distinct values in the correct order' do
expect(project_ids).to eq(expected_order)
end
end
end
end

View File

@@ -185,6 +185,7 @@ mapping:
test:
- 'ee/spec/graphql/types/remote_development/namespace_cluster_agent_mapping_type_spec.rb'
- 'ee/spec/requests/api/graphql/mutations/remote_development/namespace_cluster_agent_mapping_operations/create_spec.rb'
- 'ee/spec/requests/api/graphql/mutations/remote_development/namespace_cluster_agent_mapping_operations/delete_spec.rb'
## END Remote development GraphQL types

View File

@@ -1441,10 +1441,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.126.0.tgz#1c0bb95c11de808b78afd05dc95aca258c3b39f0"
integrity sha512-7X8uzitNn7NDcVy+FVCw8npMNEUpLGHTO5Z+BJZqVILj/FD+0WveYdPxAEVa9hXYQn5qXWM0ZAknzB9LM6Id8w==
"@gitlab/ui@111.9.1":
version "111.9.1"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-111.9.1.tgz#df6d5a6fc9359189f241e801ce85675dbc6a2b5f"
integrity sha512-PUqRP38IMre06TV1do+7brIMvzLROiAZXa42oQU9VkVcfAZ88NNi3chkifTHP/sMTRfYni2Ooj9gvV8DBax6LQ==
"@gitlab/ui@111.10.0":
version "111.10.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-111.10.0.tgz#2425dd3427846eae797270586d9b3c82c2dda415"
integrity sha512-330KozNQJM7xY9bGI4gsJ3psxF1WgAVmeQEnlawmhyaHk0fPb+pzeA4qUNdkwhQcqIV46jz5wyLSRxnCa7W3Gg==
dependencies:
"@floating-ui/dom" "1.4.3"
echarts "^5.3.2"