Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2025-03-26 18:07:35 +00:00
parent 621bb6eed5
commit 731c36f7b6
68 changed files with 623 additions and 364 deletions


@ -651,6 +651,11 @@ rspec:merge-auto-explain-logs:
needs: !reference ["rspec:coverage", "needs"]
script:
- scripts/merge-auto-explain-logs
- |
if [[ -f "$RSPEC_AUTO_EXPLAIN_LOG_PATH" && "$CI_COMMIT_REF_NAME" == "$CI_DEFAULT_BRANCH" ]]; then
source scripts/gitlab_component_helpers.sh
create_and_upload_auto_explain_package
fi
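# The guard above uploads auto-explain logs only when the log file exists and the
# pipeline runs on the default branch. create_and_upload_auto_explain_package is
# defined in scripts/gitlab_component_helpers.sh (added later in this commit).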
artifacts:
name: auto-explain-logs
expire_in: 31d
@ -929,6 +934,31 @@ rspec-ee system pg16 single-db-sec-connection:
- .rspec-ee-system-parallel
- .rails:rules:single-db-sec-connection-ee
# Integration tests with Elasticsearch and PG production versions
# These run on merge requests that meet certain conditions, and on the nightly pipeline
rspec-ee unit pg16 es8:
extends:
- .rspec-ee-base-pg16-es8
- .rspec-ee-unit-parallel
rules:
- !reference [".rails:rules:run-search-tests", rules]
- !reference [".rails:rules:default-branch-schedule-nightly--code-backstage-ee-only", rules]
rspec-ee integration pg16 es8:
extends:
- .rspec-ee-base-pg16-es8
- .rspec-ee-integration-parallel
rules:
- !reference [".rails:rules:run-search-tests", rules]
- !reference [".rails:rules:default-branch-schedule-nightly--code-backstage-ee-only", rules]
rspec-ee system pg16 es8:
extends:
- .rspec-ee-base-pg16-es8
- .rspec-ee-system-parallel
rules:
- !reference [".rails:rules:run-search-tests", rules]
- !reference [".rails:rules:default-branch-schedule-nightly--code-backstage-ee-only", rules]
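# Note: these es8 jobs replace the nightly-only definitions removed further down;
# the run-search-tests rule additionally lets them run on labeled merge requests.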
# EE: default refs (MRs, default branch, schedules) jobs #
##################################################
@ -1108,8 +1138,8 @@ rspec-ee system pg15:
- .rspec-ee-system-parallel
# PG16
# Integration tests with Elastic Search and the actual PG production version (PG16)
# https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/534
# Integration tests with supported Elasticsearch and OpenSearch versions
# and the actual PG production version (PG16)
rspec-ee unit pg16 opensearch1:
extends:
- .rspec-ee-base-pg16-opensearch1
@ -1146,23 +1176,7 @@ rspec-ee system pg16 opensearch2:
- .rspec-ee-system-parallel
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
rspec-ee unit pg16 es8:
extends:
- .rspec-ee-base-pg16-es8
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rspec-ee-unit-parallel
rspec-ee integration pg16 es8:
extends:
- .rspec-ee-base-pg16-es8
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rspec-ee-integration-parallel
rspec-ee system pg16 es8:
extends:
- .rspec-ee-base-pg16-es8
- .rails:rules:default-branch-schedule-nightly--code-backstage-ee-only
- .rspec-ee-system-parallel
# Production version tests for ES and PG in nightly pipeline are defined in the previous section
# PG17
# Note: PG17 brought us close to the limit of nightly jobs, with 1946 out of 2000.


@ -108,6 +108,9 @@
.if-merge-request-labels-run-all-e2e: &if-merge-request-labels-run-all-e2e
if: '($CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_EVENT_TYPE != "merge_train") && $CI_MERGE_REQUEST_LABELS =~ /pipeline:run-all-e2e/'
.if-merge-request-labels-run-search-tests: &if-merge-request-labels-run-search-tests
if: '($CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_EVENT_TYPE != "merge_train") && $CI_MERGE_REQUEST_LABELS =~ /pipeline:run-search-tests/'
.if-merge-request-labels-run-observability-e2e-tests-main-branch: &if-merge-request-labels-run-observability-e2e-tests-main-branch
if: '($CI_PIPELINE_SOURCE == "merge_request_event" && $CI_MERGE_REQUEST_EVENT_TYPE != "merge_train") && $CI_MERGE_REQUEST_LABELS =~ /pipeline:run-observability-e2e-tests-main-branch/'
@ -797,11 +800,14 @@
.rails:rules:run-search-tests:
rules:
- !reference [".rails:rules:default-branch-schedule-nightly--code-backstage-ee-only", rules]
- <<: *if-merge-request-labels-group-global-search
changes: *search-backend-patterns
- <<: *if-merge-request-labels-run-search-tests
changes: *ci-patterns
- <<: *if-merge-request-labels-group-global-search
changes: *ci-patterns
- <<: *if-merge-request-labels-run-search-tests
changes: *search-backend-patterns
- <<: *if-merge-request-labels-group-global-search
changes: *search-backend-patterns
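# Each entry above merges an `if:` label condition (via the `<<: *...` YAML anchor)
# with a `changes:` pattern list, so the rule matches only when the label is set and
# files matching the patterns change. *ci-patterns and *search-backend-patterns are
# anchors defined elsewhere in this file.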
.rails:rules:ee-and-foss-default-rules:
rules:


@ -1 +1 @@
a9cdf8a33308b7734ea08810cb9be586b696c3fb
29c3fd80cb7a73cb76ac1aaedb61e01289148e47


@ -127,6 +127,9 @@ export default {
removeButtonCategory() {
return this.isMobile ? 'secondary' : 'tertiary';
},
removeButtonSize() {
return this.isMobile ? 'medium' : 'small';
},
variables() {
return this.form[this.refParam]?.variables ?? [];
},
@ -340,9 +343,9 @@ export default {
<template v-if="variables.length > 1">
<gl-button
v-if="canRemove(index)"
size="small"
class="gl-shrink-0"
data-testid="remove-ci-variable-row"
:size="removeButtonSize"
:category="removeButtonCategory"
:aria-label="s__('CiVariables|Remove variable')"
@click="removeVariable(index)"


@ -5,7 +5,9 @@ import {
GlFormGroup,
GlFormInput,
GlFormTextarea,
GlIcon,
} from '@gitlab/ui';
import { GlBreakpointInstance } from '@gitlab/ui/dist/utils';
import { __ } from '~/locale';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import InputsAdoptionBanner from '~/ci/common/pipeline_inputs/inputs_adoption_banner.vue';
@ -21,6 +23,7 @@ export default {
GlFormInput,
GlFormTextarea,
InputsAdoptionBanner,
GlIcon,
},
mixins: [glFeatureFlagsMixin()],
props: {
@ -41,7 +44,7 @@ export default {
showVarValues: false,
};
},
formElementClasses: 'md:gl-mr-3 gl-mb-3 gl-basis-1/4 gl-shrink-0 gl-flex-grow-0',
formElementClasses: '!gl-block gl-basis-1/4 gl-shrink-0 gl-flex-grow-0',
// used to prevent the height from being overwritten when 'gl-h-7' or '!gl-h-7' is applied
textAreaStyle: { height: '32px' },
typeOptions: [
@ -67,6 +70,15 @@ export default {
showVarSecurityBtn() {
return this.editing && this.hasExistingScheduleVariables;
},
isMobile() {
return ['sm', 'xs'].includes(GlBreakpointInstance.getBreakpointSize());
},
removeButtonCategory() {
return this.isMobile ? 'secondary' : 'tertiary';
},
removeButtonSize() {
return this.isMobile ? 'medium' : 'small';
},
},
watch: {
variables: {
@ -133,10 +145,10 @@ export default {
class="gl-mt-0"
:feature-name="$options.userCalloutsFeatureName"
/>
<div v-for="(variable, index) in variables" :key="`var-${index}`">
<div v-for="(variable, index) in variables" :key="`var-${index}`" class="gl-mb-4">
<div
v-if="!variable.destroy"
class="gl-mb-3 gl-flex gl-flex-col gl-items-stretch gl-pb-2 md:gl-flex-row md:gl-items-start"
class="gl-flex gl-flex-col gl-items-stretch gl-gap-4 md:gl-flex-row"
data-testid="ci-variable-row"
>
<gl-collapsible-listbox
@ -163,7 +175,7 @@ export default {
v-if="displayHiddenChars(variable)"
value="*****************"
disabled
class="gl-mb-3 !gl-h-7"
class="!gl-h-7"
data-testid="pipeline-form-ci-variable-hidden-value"
/>
@ -172,7 +184,7 @@ export default {
v-model="variable.value"
:placeholder="s__('CiVariables|Input variable value')"
:aria-label="s__('CiVariables|Input variable value')"
class="gl-mb-3 gl-min-h-7"
class="gl-min-h-7"
:style="$options.textAreaStyle"
:no-resize="false"
data-testid="pipeline-form-ci-variable-value"
@ -182,17 +194,19 @@ export default {
<template v-if="variables.length > 1">
<gl-button
v-if="canRemove(index)"
class="gl-mb-3 md:gl-ml-3"
class="gl-shrink-0"
data-testid="remove-ci-variable-row"
variant="danger"
category="secondary"
icon="clear"
:size="removeButtonSize"
:category="removeButtonCategory"
:aria-label="s__('CiVariables|Remove variable')"
@click="removeVariable(index)"
/>
>
<gl-icon class="!gl-mr-0" name="remove" />
<span class="md:gl-hidden">{{ s__('CiVariables|Remove variable') }}</span>
</gl-button>
<gl-button
v-else
class="gl-invisible gl-mb-3 gl-hidden md:gl-ml-3 md:gl-block"
class="gl-invisible gl-hidden md:gl-block"
icon="clear"
:aria-label="s__('CiVariables|Remove variable')"
/>


@ -1,57 +0,0 @@
import Sortable from 'sortablejs';
import { createAlert } from '~/alert';
import axios from '~/lib/utils/axios_utils';
import { s__ } from '~/locale';
import { getSortableDefaultOptions, sortableStart } from '~/sortable/utils';
const updateIssue = (url, { move_before_id, move_after_id }) =>
axios
.put(`${url}/reorder`, {
move_before_id,
move_after_id,
})
.catch(() => {
createAlert({
message: s__("ManualOrdering|Couldn't save the order of the issues"),
});
});
const initManualOrdering = () => {
const issueList = document.querySelector('.manual-ordering');
if (!issueList || !(gon.current_user_id > 0)) {
return;
}
Sortable.create(
issueList,
getSortableDefaultOptions({
scroll: true,
fallbackTolerance: 1,
dataIdAttr: 'data-id',
fallbackOnBody: false,
group: {
name: 'issues',
},
draggable: 'li.issue',
onStart: () => {
sortableStart();
},
onUpdate: (event) => {
const el = event.item;
const url = el.getAttribute('url');
const prev = el.previousElementSibling;
const next = el.nextElementSibling;
const beforeId = prev && parseInt(prev.dataset.id, 10);
const afterId = next && parseInt(next.dataset.id, 10);
updateIssue(url, { move_after_id: afterId, move_before_id: beforeId });
},
}),
);
};
export default initManualOrdering;


@ -1,21 +1,5 @@
import { createFilteredSearchTokenKeys } from '~/filtered_search/issuable_filtered_search_token_keys';
import { mountIssuesDashboardApp } from '~/issues/dashboard';
import initManualOrdering from '~/issues/manual_ordering';
import { FILTERED_SEARCH } from '~/filtered_search/constants';
import initFilteredSearch from '~/pages/search/init_filtered_search';
import { initNewResourceDropdown } from '~/vue_shared/components/new_resource_dropdown/init_new_resource_dropdown';
const IssuableFilteredSearchTokenKeys = createFilteredSearchTokenKeys({
disableReleaseFilter: true,
});
initFilteredSearch({
page: FILTERED_SEARCH.ISSUES,
filteredSearchTokenKeys: IssuableFilteredSearchTokenKeys,
useDefaultState: true,
});
initNewResourceDropdown();
initManualOrdering();
mountIssuesDashboardApp();


@ -70,6 +70,14 @@ export default {
isLoading() {
return this.$apollo.queries.wikiPage.loading;
},
queryData() {
const { defaultClient: cache } = this.$apollo.provider.clients;
return cache.readQuery({
query: wikiPageQuery,
variables: this.queryVariables,
});
},
},
mounted() {
eventHub.$on(EVENT_EDIT_WIKI_START, () => {
@ -87,35 +95,9 @@ export default {
removePlaceholder() {
this.placeholderNote = {};
},
async updateDiscussions(discussion) {
// apollo does not update cache when a discussion is added so we have to do it manually
if (!this.$apollo.provider) return;
const { defaultClient: cache } = this.$apollo.provider.clients;
const queryData = cache.readQuery({
query: wikiPageQuery,
variables: this.queryVariables,
});
const data = produce(queryData, (draft) => {
draft.wikiPage.discussions.nodes.push({
...discussion,
replyId: null,
resolvable: false,
resolved: false,
resolvedAt: null,
resolvedBy: null,
});
});
cache.writeQuery({
query: wikiPageQuery,
variables: this.queryVariables,
data,
});
},
getDiscussionKey(key, stringModifier) {
return [key, stringModifier].join('-');
},
handleDeleteNote(noteId, discussionId) {
const discussionIndex = this.discussions.findIndex(
(discussion) => discussion.id === discussionId,
@ -123,15 +105,74 @@ export default {
if (discussionIndex === -1) return;
if (this.discussions[discussionIndex].notes.nodes.length === 1) {
const discussion = this.discussions[discussionIndex];
const isLastNote = discussion.notes.nodes.length === 1;
// Update local state
if (isLastNote) {
// Remove entire discussion if it's the last note
this.discussions = this.discussions.filter(({ id }) => id !== discussionId);
} else {
const updatedNotes = this.discussions[discussionIndex].notes.nodes.filter(
// Remove only the specific note
this.discussions[discussionIndex].notes.nodes = discussion.notes.nodes.filter(
({ id }) => id !== noteId,
);
this.discussions[discussionIndex].notes.nodes = updatedNotes;
}
this.updateCache({ discussionId, noteId, isLastNote });
},
updateCache({ discussion, discussionId, noteId, isLastNote }) {
if (!this.$apollo.provider) return;
const { defaultClient: cache } = this.$apollo.provider.clients;
const queryData = cache.readQuery({
query: wikiPageQuery,
variables: this.queryVariables,
});
if (!queryData) return;
let data;
if (discussion) {
data = produce(queryData, (draft) => {
draft.wikiPage.discussions.nodes.push({
...discussion,
replyId: null,
resolvable: false,
resolved: false,
resolvedAt: null,
resolvedBy: null,
});
});
} else {
data = produce(queryData, (draft) => {
const cachedDiscussionIndex = draft.wikiPage.discussions.nodes.findIndex(
(d) => d.id === discussionId,
);
if (cachedDiscussionIndex === -1) return;
if (isLastNote) {
// Remove entire discussion if it's the last note
draft.wikiPage.discussions.nodes = draft.wikiPage.discussions.nodes.filter(
(d) => d.id !== discussionId,
);
} else {
// Remove only the specific note
draft.wikiPage.discussions.nodes[cachedDiscussionIndex].notes.nodes =
draft.wikiPage.discussions.nodes[cachedDiscussionIndex].notes.nodes.filter(
(note) => note.id !== noteId,
);
}
});
}
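// `produce` (immer) returns a new immutable copy with the changes applied, so
// writeQuery receives a fresh object reference rather than a mutated cache entry.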
cache.writeQuery({
query: wikiPageQuery,
variables: this.queryVariables,
data,
});
},
},
};
@ -147,7 +188,7 @@ export default {
:note-id="noteableId"
@creating-note:start="setPlaceHolderNote"
@creating-note:done="removePlaceholder"
@creating-note:success="updateDiscussions"
@creating-note:success="(discussion) => updateCache({ discussion })"
/>
</template>
<template v-if="placeholderNote.body" #place-holder-note>


@ -8,6 +8,7 @@ import { SCOPE_BLOB, SEARCH_TYPE_ZOEKT } from '~/search/sidebar/constants/index'
import { parseBoolean } from '~/lib/utils/common_utils';
import { DEFAULT_FETCH_CHUNKS } from '../constants';
import { RECEIVE_NAVIGATION_COUNT } from '../../store/mutation_types';
import EmptyResult from './result_empty.vue';
import StatusBar from './status_bar.vue';
import ZoektBlobResults from './zoekt_blob_results.vue';
@ -24,6 +25,7 @@ export default {
ZoektBlobResults,
StatusBar,
GlAlert,
EmptyResult,
},
data() {
return {
@ -96,12 +98,14 @@ export default {
{{ $options.i18n.blobDataFetchError }}
</gl-alert>
<section v-else-if="isBlobScope && isZoektSearch">
<status-bar :blob-search="blobSearch" :has-results="hasResults" :is-loading="isLoading" />
<status-bar v-if="!isLoading && !hasError" :blob-search="blobSearch" />
<zoekt-blob-results
v-if="hasResults"
:blob-search="blobSearch"
:has-results="hasResults"
:is-loading="isLoading"
/>
<empty-result v-if="!hasResults && !isLoading" />
</section>
</div>
</template>


@ -19,19 +19,6 @@ export default {
type: Object,
required: true,
},
hasResults: {
type: Boolean,
required: true,
},
isLoading: {
type: Boolean,
required: true,
},
error: {
type: String,
required: false,
default: '',
},
},
computed: {
...mapState(['query', 'groupInitialJson', 'projectInitialJson', 'repositoryRef']),
@ -53,9 +40,6 @@ export default {
resultsTotal() {
return this.blobSearch?.matchCount;
},
showBar() {
return this.hasResults && !this.hasError && !this.isLoading;
},
getBaseURL() {
return getBaseURL();
},
@ -80,9 +64,6 @@ export default {
this?.resultsTotal ?? 0,
);
},
hasError() {
return Boolean(this.error);
},
},
methods: {
handleInput(selected) {
@ -93,7 +74,7 @@ export default {
</script>
<template>
<div v-if="showBar" class="search-results-status gl-my-4">
<div class="search-results-status gl-my-4">
<gl-sprintf v-if="!query.project_id && !query.group_id" :message="resultsSimple">
<template #resultsTotal>{{ resultsTotal }}</template>
<template #term


@ -5,7 +5,7 @@ import { mapState, mapActions } from 'vuex';
import BlobHeader from '~/search/results/components/blob_header.vue';
import BlobFooter from '~/search/results/components/blob_footer.vue';
import BlobBody from '~/search/results/components/blob_body.vue';
import EmptyResult from '~/search/results/components/result_empty.vue';
import {
getSystemColorScheme,
listenSystemColorSchemeChange,
@ -22,7 +22,6 @@ export default {
BlobFooter,
BlobBody,
GlPagination,
EmptyResult,
GlLoadingIcon,
},
props: {
@ -125,7 +124,6 @@ export default {
</template>
</gl-card>
</div>
<empty-result v-else-if="!hasResults && !isLoading" />
<template v-if="hasResults && !isLoading">
<gl-pagination
v-model="pagination"


@ -35,6 +35,12 @@ export default {
8,
);
const dayOfWeek = this.currentTime.getDay();
const daysUntilMonday = dayOfWeek === 0 ? 1 : 8 - dayOfWeek;
const untilNextWeek = new Date(this.currentTime);
untilNextWeek.setDate(this.currentTime.getDate() + daysUntilMonday);
untilNextWeek.setHours(8, 0, 0, 0);
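// Worked example (Date#getDay: 0 = Sunday ... 6 = Saturday):
// Wed 2024-12-18 -> dayOfWeek 3 -> daysUntilMonday 8 - 3 = 5 -> Mon 2024-12-23 08:00
// Sun 2024-12-22 -> dayOfWeek 0 -> daysUntilMonday 1 -> Mon 2024-12-23 08:00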
const toTimeString = (date) => localeDateFormat.asTime.format(date);
return [
@ -81,6 +87,20 @@ export default {
this.$emit('snooze-until', untilTomorrow);
},
},
{
text: s__('Todos|Until next week'),
formattedDate: sprintf(s__('Todos|%{day}, %{time}'), {
day: dateFormat(untilNextWeek, 'DDDD'),
time: toTimeString(untilNextWeek),
}),
action: () => {
this.track(INSTRUMENT_TODO_ITEM_CLICK, {
label: 'snooze_until_next_week',
});
this.$emit('snooze-until', untilNextWeek);
},
},
],
},
{


@ -10,7 +10,6 @@ module Ci
before_validation :assign_project_id, on: :create
validates :name, presence: true, length: { maximum: 255 }, uniqueness: { scope: :pipeline_schedule_id }
validates :value, presence: true
# We validate the size of the serialized value because encryption is expensive.
# The maximum permitted size is equivalent to the maximum size permitted for an interpolated input value.


@ -4,6 +4,16 @@ module Namespaces
class ProjectNamespace < Namespace
self.allow_legacy_sti_class = true
SYNCED_ATTRIBUTES = %w[
name
path
namespace_id
namespace
visibility_level
shared_runners_enabled
organization_id
].freeze
# These aliases are added to make it easier to sync parent/parent_id attribute with
# project.namespace/project.namespace_id attribute.
#
@ -37,11 +47,9 @@ module Namespaces
end
def sync_attributes_from_project(project)
attribute_list = %w[name path namespace_id namespace visibility_level shared_runners_enabled organization_id]
attributes_to_sync = project
.changes
.slice(*attribute_list)
.slice(*SYNCED_ATTRIBUTES)
.transform_values { |val| val[1] }
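# `project.changes` is the Rails dirty-tracking hash of { attribute => [old, new] },
# so slicing by SYNCED_ATTRIBUTES and taking val[1] keeps only the new values for
# the attributes mirrored onto the project namespace.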
# if visibility_level is not set explicitly for project, it defaults to 0,


@ -3549,8 +3549,7 @@ class Project < ApplicationRecord
end
def job_token_policies_enabled?
Feature.enabled?(:add_policies_to_ci_job_token, self) ||
namespace.root_ancestor.namespace_settings&.job_token_policies_enabled?
namespace.root_ancestor.namespace_settings&.job_token_policies_enabled?
end
strong_memoize_attr :job_token_policies_enabled?
@ -3792,7 +3791,7 @@ class Project < ApplicationRecord
end
def sync_project_namespace?
(changes.keys & %w[name path namespace_id namespace visibility_level shared_runners_enabled]).any? && project_namespace.present?
(changes.keys & Namespaces::ProjectNamespace::SYNCED_ATTRIBUTES).any? && project_namespace.present?
end
def reload_project_namespace_details


@ -2815,7 +2815,7 @@ class User < ApplicationRecord
end
def email_allowed_by_restrictions
return if placeholder? || import_user?
return if placeholder? || import_user? || security_policy_bot?
error = validate_admin_signup_restrictions(email)


@ -6,15 +6,21 @@ module Members
raise Gitlab::Access::AccessDeniedError unless can_request_access?(source)
source.members.create(
access_level: Gitlab::Access::DEVELOPER,
access_level: default_access_level,
user: current_user,
requested_at: Time.current.utc)
end
private
def default_access_level
Gitlab::Access::DEVELOPER
end
def can_request_access?(source)
can?(current_user, :request_access, source)
end
end
end
Members::RequestAccessService.prepend_mod_with('Members::RequestAccessService')


@ -25,7 +25,7 @@ module Users
return ::ServiceResponse.success(message: _('User has already been deactivated')) if user.deactivated?
unless user.can_be_deactivated?
unless can_be_deactivated?(user)
message = _(
'The user you are trying to deactivate has been active in the past %{minimum_inactive_days} days ' \
'and cannot be deactivated')
@ -49,6 +49,11 @@ module Users
attr_reader :current_user
# Wrapped in a method to allow overriding in subclasses
def can_be_deactivated?(user)
user.can_be_deactivated?
end
def allowed?
return true if @skip_authorization


@ -16,6 +16,8 @@ module WorkItems
end
def initialize_callbacks!(work_item)
# reset system notes timestamp
work_item.system_note_timestamp = nil
@callbacks = original_work_item.widgets.filter_map do |widget|
sync_data_callback_class = widget.class.sync_data_callback_class
next if sync_data_callback_class.nil?


@ -17,12 +17,14 @@ module WorkItems
handle_children
end
# Nothing to delete for children as we relink existing child links
# to the new parent in `relink_children_to_target_work_item`
def post_move_cleanup
return unless work_item.parent_link.present?
# Cleanup children linked to moved item when that is an issue because we are currently creating those
# child items in the destination namespace anyway. If we decide to relink child items for Issue WIT
# then we should not be deleting them here.
work_item.child_links.each { |child_link| child_link.work_item.destroy! } if work_item.work_item_type.issue?
work_item.parent_link.destroy!
# cleanup parent link
work_item.parent_link&.destroy!
end
private
@ -40,13 +42,13 @@ module WorkItems
end
def handle_children
# We only support moving child items for the issue work item type for now
return move_children if work_item.work_item_type.issue?
# Relink child items to the new work item first. This will be used for any work item type other than issue.
# For issue work item type we will relink child items, but then also actually move the child items(tasks) to
# For issue work item type we actually move the child items(tasks) to
# the destination namespace. This is to keep feature parity with existing move functionality on issue.
relink_children_to_target_work_item
# We only support moving child items for the issue work item type for now
move_children if work_item.work_item_type.issue?
end
def relink_children_to_target_work_item
@ -67,13 +69,9 @@ module WorkItems
end
def move_children
# Reload as the child_links association was just changed by relinking child items
# in `relink_children_to_target_work_item`
target_work_item.reset
# We iterate over "new work item" child links now, because we have relinked child items from moved work item
# to the new work item in `relink_children_to_target_work_item`.
target_work_item.child_links.each do |link|
work_item.child_links.each do |link|
# This is going to be moved to an async worker. This is planned as a follow-up iteration for a bunch of
# other work item association data. The async implementation for move will be tracked in:
# https://gitlab.com/groups/gitlab-org/-/epics/15934


@ -1,9 +0,0 @@
---
name: add_policies_to_ci_job_token
feature_issue_url: https://gitlab.com/gitlab-org/govern/authorization/team-tasks/-/issues/69
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167872
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/497754
milestone: '17.5'
group: group::authorization
type: wip
default_enabled: false


@ -886,6 +886,9 @@ Gitlab.ee do
Settings.cron_jobs['vulnerability_statistics_schedule_worker'] ||= {}
Settings.cron_jobs['vulnerability_statistics_schedule_worker']['cron'] ||= '15 1,20 * * *'
Settings.cron_jobs['vulnerability_statistics_schedule_worker']['job_class'] = 'Vulnerabilities::Statistics::ScheduleWorker'
Settings.cron_jobs['vulnerability_namespace_statistics_schedule_worker'] ||= {}
Settings.cron_jobs['vulnerability_namespace_statistics_schedule_worker']['cron'] ||= '0 8 * * 0'
Settings.cron_jobs['vulnerability_namespace_statistics_schedule_worker']['job_class'] = 'Vulnerabilities::NamespaceStatistics::ScheduleWorker'
Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker'] ||= {}
Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker']['cron'] ||= '15 3 * * *'
Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker']['job_class'] = 'Vulnerabilities::HistoricalStatistics::DeletionWorker'


@ -995,6 +995,8 @@
- 1
- - vulnerabilities_namespace_historical_statistics_update_traversal_ids
- 1
- - vulnerabilities_namespace_statistics_adjustment
- 1
- - vulnerabilities_process_archived_events
- 1
- - vulnerabilities_process_bulk_dismissed_events


@ -9,5 +9,5 @@ classes:
- GitlabSubscriptions::SeatAssignment
gitlab_schema: gitlab_main_cell
sharding_key:
namespace_id: namespaces
organization_id: organizations
table_size: small


@ -0,0 +1,26 @@
# frozen_string_literal: true
class AddUsernamePasswordToVirtualRegistriesPackagesMavenUpstreams < Gitlab::Database::Migration[2.2]
milestone '17.11'
disable_ddl_transaction!
TABLE_NAME = :virtual_registries_packages_maven_upstreams
def up
with_lock_retries do
add_column TABLE_NAME, :username, :jsonb, null: true, if_not_exists: true
add_column TABLE_NAME, :password, :jsonb, null: true, if_not_exists: true
end
add_check_constraint TABLE_NAME,
'num_nonnulls(username, password) = 2 OR num_nulls(username, password) = 2',
check_constraint_name(TABLE_NAME, 'username_and_password', 'both_set_or_null')
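# The constraint is all-or-nothing: num_nonnulls = 2 means both credentials are
# present, num_nulls = 2 means both are absent; a row with only one of the two
# set is rejected.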
end
def down
with_lock_retries do
remove_column(TABLE_NAME, :username, if_exists: true)
remove_column(TABLE_NAME, :password, if_exists: true)
end
end
end


@ -0,0 +1,12 @@
# frozen_string_literal: true
class AddOrganizationIdToSubscriptionSeatAssignments < Gitlab::Database::Migration[2.2]
milestone '17.11'
DEFAULT_ORGANIZATION_ID = 1
enable_lock_retries!
def change
add_column :subscription_seat_assignments, :organization_id, :bigint, default: DEFAULT_ORGANIZATION_ID, null: false
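# Adding a column with a constant default is a metadata-only change on
# PostgreSQL 11 and later, so no batched backfill is needed here.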
end
end


@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddSubscriptionSeatAssignmentOrganizationIdIndex < Gitlab::Database::Migration[2.2]
milestone '17.11'
disable_ddl_transaction!
INDEX_NAME = 'index_subscription_seat_assignments_on_organization_id'
def up
add_concurrent_index :subscription_seat_assignments, :organization_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :subscription_seat_assignments, INDEX_NAME
end
end


@ -0,0 +1,18 @@
# frozen_string_literal: true
class AddSubscriptionSeatAssignmentOrganizationIdFk < Gitlab::Database::Migration[2.2]
milestone '17.11'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :subscription_seat_assignments, :organizations, column: :organization_id,
on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :subscription_seat_assignments, column: :organization_id
end
end
end


@ -0,0 +1 @@
18b2f1d2eea6c3f233dd11ca996fc01ec62c52f755d73a44d69018e215581f93


@ -0,0 +1 @@
95d47c1d6c693c29fcc3662283dd928686402e1efea087ec52cf998041c3dfc1


@ -0,0 +1 @@
e59336609157a8a5ae1a8b7c8b8aa9c33162be0b01732e0eb043b5752bcc9680


@ -0,0 +1 @@
104371717571df5325938552b74e7bd137b3b8b38242013bb1aebc4876e03cbf


@ -23148,7 +23148,8 @@ CREATE TABLE subscription_seat_assignments (
user_id bigint NOT NULL,
last_activity_on timestamp with time zone,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL
updated_at timestamp with time zone NOT NULL,
organization_id bigint DEFAULT 1 NOT NULL
);
CREATE SEQUENCE subscription_seat_assignments_id_seq
@ -24375,8 +24376,11 @@ CREATE TABLE virtual_registries_packages_maven_upstreams (
encrypted_username_iv bytea,
encrypted_password bytea,
encrypted_password_iv bytea,
username jsonb,
password jsonb,
CONSTRAINT check_2366658457 CHECK ((octet_length(encrypted_username) <= 1020)),
CONSTRAINT check_26c0572777 CHECK ((char_length(url) <= 255)),
CONSTRAINT check_4db365ecc9 CHECK (((num_nonnulls(username, password) = 2) OR (num_nulls(username, password) = 2))),
CONSTRAINT check_a3593dca3a CHECK ((cache_validity_hours >= 0)),
CONSTRAINT check_c3977cdb0c CHECK ((octet_length(encrypted_username_iv) <= 1020)),
CONSTRAINT check_e4b6e651bf CHECK ((octet_length(encrypted_password_iv) <= 1020)),
@ -37038,6 +37042,8 @@ CREATE UNIQUE INDEX index_subscription_add_ons_on_name ON subscription_add_ons U
CREATE INDEX index_subscription_addon_purchases_on_expires_on ON subscription_add_on_purchases USING btree (expires_on);
CREATE INDEX index_subscription_seat_assignments_on_organization_id ON subscription_seat_assignments USING btree (organization_id);
CREATE INDEX index_subscription_seat_assignments_on_user_id ON subscription_seat_assignments USING btree (user_id);
CREATE INDEX index_subscription_user_add_on_assignments_on_organization_id ON subscription_user_add_on_assignments USING btree (organization_id);
@ -42798,6 +42804,9 @@ ALTER TABLE ONLY duo_workflows_checkpoints
ALTER TABLE ONLY packages_conan_package_revisions
ADD CONSTRAINT fk_b482b1a2f8 FOREIGN KEY (package_reference_id) REFERENCES packages_conan_package_references(id) ON DELETE CASCADE;
ALTER TABLE ONLY subscription_seat_assignments
ADD CONSTRAINT fk_b4bdbc61ee FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
ALTER TABLE ONLY protected_tag_create_access_levels
ADD CONSTRAINT fk_b4eb82fe3c FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;


@ -144,14 +144,6 @@ You can also add a group or project to the allowlist [with the API](../../api/gr
{{< /history >}}
{{< alert type="flag" >}}
The availability of this feature is controlled by a feature flag.
For more information, see the history.
This feature is available for testing, but not ready for production use.
{{< /alert >}}
You can populate a project's allowlist using the data from the [job token authentication log](#job-token-authentication-log)
with the UI or a Rake task.


@ -1629,11 +1629,12 @@ in the cluster back the request in GitLab.
## Development tips
- [Kibana](advanced_search/tips.md#kibana)
- [Viewing index status](advanced_search/tips.md#viewing-index-status)
- [Creating indices from scratch](advanced_search/tips.md#creating-all-indices-from-scratch-and-populating-with-local-data)
- [Testing migrations](advanced_search/tips.md#testing-migrations)
- [Index data](advanced_search/tips.md#index-data)
- [Kibana](advanced_search/tips.md#kibana)
- [Running tests with Elasticsearch](advanced_search/tips.md#testing)
- [Testing migrations](advanced_search/tips.md#advanced-search-migrations)
- [Viewing index status](advanced_search/tips.md#viewing-index-status)
## Troubleshooting


@ -100,9 +100,18 @@ Elastic::ProcessBookkeepingService.track!(*MergeRequest.all)
Elastic::ProcessBookkeepingService.new.execute
```
## Testing migrations
## Testing
### Testing a migration that changes a mapping of an index
{{< alert type="warning" >}}
Elasticsearch tests do not run on every merge request. Add `~pipeline:run-search-tests` or `~group::global search` labels to the merge
request to run tests with the production versions of Elasticsearch and PostgreSQL.
{{< /alert >}}
### Advanced search migrations
#### Testing a migration that changes a mapping of an index
1. Make sure the index doesn't already have the changes applied. Remember the migration cron worker runs in the background so it's possible the migration was already applied.
- You can consider disabling the migration worker to have more control: `Feature.disable(:elastic_migration_worker)`.


@ -185,8 +185,11 @@ prompt_template:
Once a stable prompt version is added to the AI Gateway it should not be altered. You can create a mutable version of a
prompt by adding a pre-release suffix to the file name (e.g. `1.0.1-dev.yml`). This will also prevent it from being
automatically served to clients. Then you can use a feature flag to control the rollout of this new version. If your AI
action is implemented as a subclass of `AiGateway::Completions::Base`, you can achieve this by overriding the prompt
automatically served to clients. Then you can use a feature flag to control the rollout of this new version. For GitLab
Duo Self-hosted, forced versions are ignored, and only versions defined in `PromptVersions` are used. This avoids
mistakenly enabling versions for models that do not define the specified version.
If your AI action is implemented as a subclass of `AiGateway::Completions::Base`, you can achieve this by overriding the prompt
version in your subclass:
```ruby


@ -800,6 +800,18 @@ Single database tests run in two modes:
If you want to force tests to run with a single database, you can add the `pipeline:run-single-db` label to the merge request.
### Elasticsearch and OpenSearch versions testing
Because GitLab.com runs on Elasticsearch 8, our test suite runs against Elasticsearch 8 when certain conditions are met.
We run our test suite against Elasticsearch 7 and 8 and OpenSearch 1 and 2 on nightly scheduled pipelines. All
test suites use PostgreSQL 16 because there is no dependency between the database and the search backend.
| Where? | Elasticsearch version | OpenSearch Version | PostgreSQL version |
|-------------------------------------------------------------------------------------------------|-----------------------|----------------------|----------------------|
| Merge requests with label `~group::global search` or `~pipeline:run-search-tests` | 8.X (production) | | 16 (default version) |
| `nightly` scheduled pipelines for the `master` branch | 7.X, 8.X (production) | 1.X, 2.X | 16 (default version) |
## Monitoring
The GitLab test suite is [monitored](../performance.md#rspec-profiling) for the `main` branch, and any branch


@ -66,6 +66,7 @@ Before you can use Workflow, you must:
- The repository you want to work with should be small or medium-sized.
Workflow can be slow or fail for large repositories.
- [Successfully connect to your repository](#connect-to-your-repository).
- [Ensure an HTTP/2 connection to the Workflow service is possible](troubleshooting.md#network-issues).
{{< alert type="note" >}}


@ -7,7 +7,7 @@ title: Configure GitLab Duo on a GitLab Self-Managed instance
{{< details >}}
- Offering: GitLab Self-Managed, GitLab Dedicated
- Offering: GitLab Self-Managed
{{< /details >}}


@ -104,6 +104,8 @@ The following actions count as activity:
- Visiting pages in GitLab, such as dashboards, projects, issues, merge requests, or settings.
- Using the REST or GraphQL API in the scope of the group.
Dormant [enterprise users](../enterprise_user/_index.md) are not removed, but [deactivated](../../administration/moderate_users.md#deactivate-and-reactivate-users). When these users sign back in, their accounts are reactivated and their access is restored.
{{< alert type="note" >}}
Activity has not been recorded for members added before 2025-01-22. These members will not be removed until 2025-04-22, even if they have been dormant for over 90 days.


@ -28,81 +28,59 @@ For more information, see the history.
{{< /alert >}}
The Web IDE is an advanced editor with commit staging.
You can use the Web IDE to make changes to multiple files directly from the GitLab UI.
For a more basic implementation, see [Web Editor](../repository/web_editor.md).
The Web IDE is an advanced editor with commit staging where you can make changes to multiple
files directly from the GitLab UI. It provides a more robust editing experience compared to the
[Web Editor](../repository/web_editor.md).
Support for [GitLab Flavored Markdown](../../markdown.md) preview in the Web IDE is proposed in
[issue 645](https://gitlab.com/gitlab-org/gitlab-vscode-extension/-/issues/645).
## Open the Web IDE
To open the Web IDE:
You can access the Web IDE through several methods.
### With a keyboard shortcut
1. On the left sidebar, select **Search or go to** and find your project.
1. Use the <kbd>.</kbd> keyboard shortcut.
### From a file or directory
To open the Web IDE from a file or directory:
1. On the left sidebar, select **Search or go to** and find your project.
1. Go to your file or directory.
1. Select **Edit > Open in Web IDE**.
### From a merge request
To open the Web IDE from a merge request:
1. On the left sidebar, select **Search or go to** and find your project.
1. Go to your merge request.
1. In the upper right, select **Code > Open in Web IDE**.
The Web IDE opens new and modified files in separate tabs and displays changes side by side.
The Web IDE opens new and modified files in separate tabs, and displays changes side by side.
To reduce load time, only 10 files with the most lines changed open automatically.
The left **Explorer** sidebar adds a merge request icon ({{< icon name="merge-request" >}}) next to new or modified files.
To view changes to a file, right-click the file and select **Compare with merge request base**.
## Open a file
## Manage files
You can use the Web IDE to open, edit, and upload multiple files.
### Open a file
To open a file by name in the Web IDE:
1. Press <kbd>Command</kbd>+<kbd>P</kbd>.
1. In the search box, enter the filename.
## Search open files
### Search open files
To search across open files in the Web IDE:
1. Press <kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>F</kbd>.
1. In the search box, enter your search term.
## View a list of modified files
To view a list of files you modified in the Web IDE:
- On the left activity bar, select **Source Control**, or
press <kbd>Control</kbd>+<kbd>Shift</kbd>+<kbd>G</kbd>.
Your `CHANGES`, `STAGED CHANGES`, and `MERGE CHANGES` are displayed.
For more information, see the [VS Code documentation](https://code.visualstudio.com/docs/sourcecontrol/overview#_commit).
## Restore uncommitted changes
You do not have to manually save any file you edit in the Web IDE.
The Web IDE stages the files you modify, so you can [commit the changes](#commit-changes).
Uncommitted changes are saved in your browser's local storage, and persist
even if you close the browser tab or refresh the Web IDE.
If your uncommitted changes are not available, you can restore the changes from local history.
To restore uncommitted changes in the Web IDE:
1. Press <kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>P</kbd>.
1. In the search box, enter `Local History: Find Entry to Restore`.
1. Select the file that contains the uncommitted changes.
## Upload a file
### Upload a file
To upload a file in the Web IDE:
@ -120,7 +98,36 @@ To upload a file in the Web IDE:
You can upload multiple files at once.
The files are uploaded and automatically added to the repository.
## Switch branches
### Restore uncommitted changes
You do not have to manually save any file you edit in the Web IDE.
The Web IDE stages the files you modify, so you can [commit the changes](#commit-changes).
Uncommitted changes are saved in your browser's local storage. They persist
even if you close the browser tab or refresh the Web IDE.
If your uncommitted changes are not available, you can restore the changes from local history.
To restore uncommitted changes in the Web IDE:
1. Press <kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>P</kbd>.
1. In the search box, enter `Local History: Find Entry to Restore`.
1. Select the file that contains the uncommitted changes.
## Use source control
You can use source control to view modified files, create and switch branches,
commit changes, and create merge requests.
### View modified files
To view a list of files you modified in the Web IDE:
- On the left activity bar, select **Source Control**, or
press <kbd>Control</kbd>+<kbd>Shift</kbd>+<kbd>G</kbd>.
Your `CHANGES`, `STAGED CHANGES`, and `MERGE CHANGES` are displayed.
For more information, see the [VS Code documentation](https://code.visualstudio.com/docs/sourcecontrol/overview#_commit).
### Switch branches
The Web IDE uses the current branch by default.
To switch branches in the Web IDE:
@ -128,7 +135,7 @@ To switch branches in the Web IDE:
1. On the bottom status bar, on the left, select the current branch name.
1. Enter or select an existing branch.
## Create a branch
### Create a branch
To create a branch from the current branch in the Web IDE:
@ -138,7 +145,7 @@ To create a branch from the current branch in the Web IDE:
If you do not have write access to the repository, **Create new branch** is not visible.
## Commit changes
### Commit changes
To commit changes in the Web IDE:
@ -147,7 +154,7 @@ To commit changes in the Web IDE:
1. Enter your commit message.
1. Commit to the current branch or [create a new branch](#create-a-branch).
## Create a merge request
### Create a merge request
To create a [merge request](../merge_requests/_index.md) in the Web IDE:
@ -156,7 +163,12 @@ To create a [merge request](../merge_requests/_index.md) in the Web IDE:
For more information, see [View missed notifications](#view-missed-notifications).
## Use the Command Palette
## Customize the Web IDE
Customize the Web IDE to match your preferences for keyboard shortcuts,
themes, settings, and synchronization.
### Use the Command Palette
You can use the Command Palette to access many commands.
To open the Command Palette and run a command in the Web IDE:
@ -164,7 +176,7 @@ To open the Command Palette and run a command in the Web IDE:
1. Press <kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>P</kbd>.
1. Enter or select the command.
## Edit settings
### Edit settings
You can use the settings editor to view and edit your user and workspace settings.
To open the settings editor in the Web IDE:
@ -174,7 +186,7 @@ To open the settings editor in the Web IDE:
In the settings editor, you can search for the settings you want to change.
## Edit keyboard shortcuts
### Edit keyboard shortcuts
You can use the keyboard shortcuts editor to view and change
the default keybindings for all available commands.
@ -191,16 +203,7 @@ In the keyboard shortcuts editor, you can search for:
Keybindings are based on your keyboard layout.
If you change your keyboard layout, existing keybindings are updated automatically.
### Use Vim keybindings
Use Vim keybindings to navigate and edit text using keyboard shortcuts from the Vim text editor.
With the [Extensions Marketplace](#extension-marketplace), you can add Vim keybindings to
the Web IDE.
To enable Vim keybindings, install the [Vim](https://open-vsx.org/extension/vscodevim/vim)
extension. For more information, see [install an extension](#install-an-extension).
## Change the color theme
### Change the color theme
You can choose between different color themes for the Web IDE.
The default theme is **GitLab Dark**.
@ -214,7 +217,7 @@ To change the color theme in the Web IDE:
The Web IDE stores your active color theme in your [user settings](#edit-settings).
## Configure sync settings
### Configure sync settings
To configure sync settings in the Web IDE:
@ -232,7 +235,7 @@ To configure sync settings in the Web IDE:
These settings sync automatically across multiple Web IDE instances.
You cannot sync user profiles or go back to an earlier version of synced settings.
## View missed notifications
### View missed notifications
When you perform actions in the Web IDE, notifications appear in the lower right.
To view any notification you might have missed:
@ -318,6 +321,14 @@ the Red Hat [YAML](https://open-vsx.org/extension/redhat/vscode-yaml) extension.
}
```
#### Use Vim keybindings
Use Vim keybindings to navigate and edit text using keyboard shortcuts from the Vim text editor.
With the Extensions Marketplace, you can add Vim keybindings to the Web IDE.
To enable Vim keybindings, install the [Vim](https://open-vsx.org/extension/vscodevim/vim)
extension. For more information, see [install an extension](#install-an-extension).
## Related topics
- [GitLab Duo Chat in the Web IDE](../../gitlab_duo_chat/_index.md#use-gitlab-duo-chat-in-the-web-ide)


@ -26648,6 +26648,9 @@ msgstr ""
msgid "Geo|There was an error deleting the Geo Site"
msgstr ""
msgid "Geo|There was an error fetching the %{replicableType}. The GraphQL API call to the secondary may have failed."
msgstr ""
msgid "Geo|There was an error fetching the Geo Settings"
msgstr ""
@ -35416,9 +35419,6 @@ msgstr ""
msgid "Manual Variables"
msgstr ""
msgid "ManualOrdering|Couldn't save the order of the issues"
msgstr ""
msgid "ManualVariables|There are no manually-specified variables for this pipeline"
msgstr ""
@ -53027,6 +53027,9 @@ msgstr ""
msgid "SecurityInventory|No projects found"
msgstr ""
msgid "SecurityInventory|Project vulnerabilities"
msgstr ""
msgid "SecurityInventory|SAST"
msgstr ""
@ -59895,9 +59898,6 @@ msgstr ""
msgid "There was an error fetching stage total counts"
msgstr ""
msgid "There was an error fetching the %{replicableType}"
msgstr ""
msgid "There was an error fetching the cancelable jobs."
msgstr ""
@ -61716,6 +61716,9 @@ msgstr ""
msgid "Todos|Until later today"
msgstr ""
msgid "Todos|Until next week"
msgstr ""
msgid "Todos|Until tomorrow"
msgstr ""


@ -10,7 +10,6 @@ ee/spec/frontend/analytics/cycle_analytics/vsa_settings/components/value_stream_
ee/spec/frontend/analytics/dashboards/ai_impact/components/metric_table_spec.js
ee/spec/frontend/analytics/devops_reports/devops_adoption/components/devops_adoption_app_spec.js
ee/spec/frontend/analytics/group_ci_cd_analytics/components/release_stats_card_spec.js
ee/spec/frontend/analytics/merge_request_analytics/components/throughput_chart_spec.js
ee/spec/frontend/approvals/components/security_configuration/unconfigured_security_rules_spec.js
ee/spec/frontend/approvals/mr_edit/mr_rules_spec.js
ee/spec/frontend/approvals/project_settings/project_rules_spec.js


@ -224,3 +224,13 @@ function fixtures_directory_exists() {
function upload_fixtures_package() {
upload_package "${FIXTURES_PACKAGE}" "${FIXTURES_PACKAGE_URL}"
}
# Dump auto-explain logs
export AUTO_EXPLAIN_PACKAGE="auto-explain-logs.tar.gz"
export AUTO_EXPLAIN_PATH="${RSPEC_AUTO_EXPLAIN_LOG_PATH}"
export AUTO_EXPLAIN_PACKAGE_URL="${API_PACKAGES_BASE_URL}/auto-explain-logs/master/${AUTO_EXPLAIN_PACKAGE}"
function create_and_upload_auto_explain_package() {
create_package "${AUTO_EXPLAIN_PACKAGE}" "${AUTO_EXPLAIN_PATH}"
upload_package "${AUTO_EXPLAIN_PACKAGE}" "${AUTO_EXPLAIN_PACKAGE_URL}"
}
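# Assumption: create_package and upload_package are helpers defined earlier in this
# script; create_package presumably archives the given path and upload_package
# presumably uploads the archive to the generic packages API at the given URL.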


@ -95,6 +95,18 @@ describe('WikiNotesApp', () => {
wrapper = shallowMountExtended(WikiNotesApp, {
apolloProvider: fakeApollo,
data() {
return {
wikiPage: {
id: 'gid://gitlab/WikiPage/1',
title: 'home',
discussions: {
nodes: [mockDiscussion('Discussion 1')],
},
},
...mockQueryResponse,
};
},
provide: {
containerId: noteableId,
noteCount: 5,
@ -256,8 +268,49 @@ describe('WikiNotesApp', () => {
const errorAlert = wrapper.findComponent(GlAlert);
expect(errorAlert.exists()).toBe(false);
});
});
it('should delete the note correctly when the WikiDiscussions emits "note-deleted" when there are replies', async () => {
describe('when "note-deleted" is fired', () => {
let discussions;
beforeEach(async () => {
discussions = {
nodes: [
mockDiscussion('Discussion 1'),
mockDiscussion('Discussion 2'),
mockDiscussion('Discussion 3 Note 1', 'Discussion 3 Note 2', 'Discussion 3 Note 3'),
],
};
await createWrapper({
mockQueryResponse: {
wikiPage: {
id: 'gid://gitlab/WikiPage/1',
title: 'home',
discussions,
},
},
});
});
it('should call write query with the correct data', async () => {
wrapper.findComponent(WikiDiscussion).vm.$emit('note-deleted');
await nextTick();
expect(apolloCache.writeQuery).toHaveBeenCalledWith({
query: wikiPageQuery,
variables: queryVariables,
data: { noteableId: '7', wikiPage },
});
});
it('should delete note correctly when there are no replies', async () => {
wrapper.findComponent(WikiDiscussion).vm.$emit('note-deleted');
await nextTick();
expect(wrapper.findAllComponents(WikiDiscussion)).toHaveLength(2);
});
it('should delete note correctly when there are replies', async () => {
const wikiDiscussions = wrapper.findAllComponents(WikiDiscussion);
// delete first note


@ -10,6 +10,7 @@ import getBlobSearchQuery from '~/search/graphql/blob_search_zoekt.query.graphql
import GlobalSearchResultsApp from '~/search/results/components/app.vue';
import ZoektBlobResults from '~/search/results/components/zoekt_blob_results.vue';
import StatusBar from '~/search/results/components/status_bar.vue';
import EmptyResult from '~/search/results/components/result_empty.vue';
import mutations from '~/search/store/mutations';
import {
MOCK_QUERY,
@ -58,6 +59,7 @@ describe('GlobalSearchResultsApp', () => {
};
const findZoektBlobResults = () => wrapper.findComponent(ZoektBlobResults);
const findEmptyResult = () => wrapper.findComponent(EmptyResult);
const findStatusBar = () => wrapper.findComponent(StatusBar);
const findAlert = () => wrapper.findComponent(GlAlert);
@ -108,9 +110,14 @@ describe('GlobalSearchResultsApp', () => {
await waitForPromises();
});
it(`renders component properly`, async () => {
it(`Renders empty state`, async () => {
await waitForPromises();
expect(findZoektBlobResults().props('hasResults')).toBe(false);
expect(findZoektBlobResults().exists()).toBe(false);
expect(findEmptyResult().exists()).toBe(true);
});
it('Renders status bar', () => {
expect(findStatusBar().exists()).toBe(true);
});
});


@ -185,6 +185,7 @@ describe('GlobalSearchStatusBar', () => {
);
});
});
describe('single result', () => {
beforeEach(() => {
createComponent({
@ -225,27 +226,27 @@ describe('GlobalSearchStatusBar', () => {
beforeEach(() => {
createComponent({
propsData: {
blobSearch: {
perPage: 20,
fileCount: 0,
matchCount: 0,
},
hasResults: false,
},
});
});
it('does not render the status bar', () => {
expect(wrapper.text()).toBe('');
});
});
describe('when loading', () => {
beforeEach(() => {
createComponent({
propsData: {
isLoading: true,
initialState: {
query: {
...MOCK_QUERY,
group_id: 1,
project_id: null,
search: 'test',
},
groupInitialJson,
},
});
});
it('does not render the status bar', () => {
expect(wrapper.text()).toBe('');
expect(wrapper.text()).toBe('Showing 0 code results for test in group Group Full Name');
});
});
});


@ -5,7 +5,6 @@ import { GlLoadingIcon, GlCard } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ZoektBlobResults from '~/search/results/components/zoekt_blob_results.vue';
import waitForPromises from 'helpers/wait_for_promises';
import EmptyResult from '~/search/results/components/result_empty.vue';
import { MOCK_QUERY, mockGetBlobSearchQuery } from '../../mock_data';
@ -44,7 +43,6 @@ describe('ZoektBlobResults', () => {
};
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findEmptyResult = () => wrapper.findComponent(EmptyResult);
beforeEach(() => {
window.gon.user_color_mode = 'gl-light';
@ -80,19 +78,4 @@ describe('ZoektBlobResults', () => {
expect(wrapper.element).toMatchSnapshot();
});
});
describe('when component has no results', () => {
beforeEach(async () => {
createComponent({
propsData: { hasResults: false },
});
jest.advanceTimersByTime(500);
await waitForPromises();
});
it(`renders component properly`, async () => {
await nextTick();
expect(findEmptyResult().exists()).toBe(true);
});
});
});


@ -52,6 +52,10 @@ describe('SnoozeTimePicker', () => {
formattedDate: 'Tomorrow, 8:00 AM',
text: 'Until tomorrow',
}),
expect.objectContaining({
formattedDate: 'Monday, 8:00 AM',
text: 'Until next week',
}),
],
name: 'Snooze',
},
@ -70,6 +74,7 @@ describe('SnoozeTimePicker', () => {
${0} | ${'2024-12-18T14:24:00.000Z'} | ${'snooze_for_one_hour'}
${1} | ${'2024-12-18T17:24:00.000Z'} | ${'snooze_until_later_today'}
${2} | ${'2024-12-19T08:00:00.000Z'} | ${'snooze_until_tomorrow'}
${3} | ${'2024-12-23T08:00:00.000Z'} | ${'snooze_until_next_week'}
`(
'triggers the snooze action with snoozeUntil = $expectedDate when clicking option #$index',
({ index, expectedDate, expectedTrackingLabel }) => {


@ -32,6 +32,12 @@ RSpec.describe Mutations::Ci::JobTokenScope::AddGroupOrProject, feature_category
end
end
before do
allow_next_found_instance_of(Project) do |project|
allow(project).to receive(:job_token_policies_enabled?).and_return(true)
end
end
context 'when we add a project' do
let_it_be(:target_project) { create(:project) }
let_it_be(:target_project_path) { target_project.full_path }


@ -288,6 +288,9 @@ RSpec.describe API::Helpers, feature_category: :shared do
allow(helper).to receive(:route_authentication_setting).and_return({})
allow(helper).to receive(:route_setting).with(:authorization).and_return(job_token_policies: job_token_policy)
allow(user).to receive(:ci_job_token_scope).and_return(user.set_ci_job_token_scope!(job))
allow_next_found_instance_of(Project) do |project|
allow(project).to receive(:job_token_policies_enabled?).and_return(true)
end
end
subject(:find_project!) { helper.find_project!(project.id) }


@ -262,6 +262,7 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
"ai_duo_chat_events" => "https://gitlab.com/gitlab-org/gitlab/-/issues/516140",
"fork_networks" => "https://gitlab.com/gitlab-org/gitlab/-/issues/522958",
"merge_request_diff_commit_users" => "https://gitlab.com/gitlab-org/gitlab/-/issues/526725",
"subscription_seat_assignments" => "https://gitlab.com/gitlab-org/gitlab/-/issues/526769",
# All the tables below related to uploads are part of the same work to
# add sharding key to the table
"uploads" => "https://gitlab.com/gitlab-org/gitlab/-/issues/398199",


@ -11,6 +11,10 @@ RSpec.describe Ci::JobToken::Allowlist, feature_category: :continuous_integratio
let(:allowlist) { described_class.new(source_project, direction: direction) }
let(:direction) { :outbound }
before do
allow(source_project).to receive(:job_token_policies_enabled?).and_return(true)
end
describe '#projects' do
subject(:projects) { allowlist.projects }


@ -237,6 +237,10 @@ RSpec.describe Ci::JobToken::Scope, feature_category: :continuous_integration, f
let_it_be(:allowed_policy) { ::Ci::JobToken::Policies::POLICIES.first }
let(:accessed_project) { create_inbound_accessible_project_for_policies(target_project, [allowed_policy]) }
before do
allow(accessed_project).to receive(:job_token_policies_enabled?).and_return(true)
end
shared_examples 'capturing job token policies' do
it 'captures job token policies' do
expect(::Ci::JobToken::Authorization).to receive(:capture_job_token_policies).with(policies)


@ -41,7 +41,15 @@ RSpec.describe Ci::PipelineScheduleInput, feature_category: :continuous_integrat
end
describe 'value' do
it { is_expected.to validate_presence_of(:value) }
it 'allows falsey values' do
input.value = false
expect(input).to be_valid
input.value = ''
expect(input).to be_valid
end
context 'when the serialized length of the value is less than the maximum permitted size' do
it 'is valid' do


@ -361,25 +361,21 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
end
context 'with project namespaces' do
shared_examples 'creates project namespace' do
it 'automatically creates a project namespace' do
project = build(:project, path: 'hopefully-valid-path1')
project.save!
it 'automatically creates a project namespace' do
project = build(:project, path: 'hopefully-valid-path1')
project.save!
expect(project).to be_persisted
expect(project.project_namespace).to be_persisted
expect(project.project_namespace).to be_in_sync_with_project(project)
expect(project.reload.project_namespace.traversal_ids).to eq([project.namespace.traversal_ids, project.project_namespace.id].flatten.compact)
end
expect(project).to be_persisted
expect(project.project_namespace).to be_persisted
expect(project.project_namespace).to be_in_sync_with_project(project)
expect(project.reload.project_namespace.traversal_ids).to match_array([project.namespace.traversal_ids, project.project_namespace.id].flatten.compact)
end
it_behaves_like 'creates project namespace'
end
end
context 'updating a project' do
let_it_be(:project_namespace) { create(:project_namespace) }
let_it_be(:project) { project_namespace.project }
let_it_be(:project, reload: true) { project_namespace.project }
context 'when project has an associated project namespace' do
# when FF is disabled creating a project does not create a project_namespace, so we create one
@ -396,19 +392,20 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
expect(project.reload.project_namespace).to be_in_sync_with_project(project)
end
context 'when same project is being updated in 2 instances' do
it 'syncs only changed attributes' do
project1 = described_class.last
project2 = described_class.last
project_name = project1.name
project_path = project1.path
project1.update!(name: project_name + "-1")
project2.update!(path: project_path + "-1")
expect(project.reload.project_namespace).to be_in_sync_with_project(project)
end
it 'syncs changed attributes' do
project.update!(name: "New project name", path: "new_project_path")
expect(project.reload.project_namespace).to be_in_sync_with_project(project)
end
# Regression test for edge-case introduced by 0a71dc3f33e809198d522d3cf2a28781aeac5809
it 'syncs organization_id even when it is the only change' do
new_org = create(:organization)
project.parent.update_column(:organization_id, new_org.id)
project.update!(organization_id: new_org.id)
expect(project.reload.project_namespace).to be_in_sync_with_project(project)
end
end
end
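The regression test hinges on the distinction between `update_column` and `update!`: `update_column` writes a single column straight to the database, skipping validations and callbacks, which moves the parent namespace without triggering any sync; the following `update!` must then propagate `organization_id` even though it is the only changed attribute. For illustration:

  # update_column: raw single-column UPDATE; no validations, no callbacks,
  # so nothing re-syncs the project namespace as a side effect.
  project.parent.update_column(:organization_id, new_org.id)

  # update!: full save path; validations and callbacks run, so the
  # project -> project_namespace sync fires even for this lone change.
  project.update!(organization_id: new_org.id)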
@ -10031,22 +10028,16 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
subject { project.job_token_policies_enabled? }
where(:flag_enabled, :setting_enabled, :result) do
true | true | true
true | false | true
false | true | true
false | false | false
end
where(:setting_enabled) { [true, false] }
before do
project.clear_memoization(:job_token_policies_enabled?)
stub_feature_flags(add_policies_to_ci_job_token: flag_enabled)
allow(project).to receive_message_chain(:namespace, :root_ancestor, :namespace_settings,
:job_token_policies_enabled?).and_return(setting_enabled)
end
with_them do
it { is_expected.to eq(result) }
it { is_expected.to eq(setting_enabled) }
end
end
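With `add_policies_to_ci_job_token` removed, the old 2x2 truth table (flag OR setting) collapses to the namespace setting alone, hence the single-variable `where`. The predicate presumably reduces to something like the sketch below; the memoization is implied by the spec's `clear_memoization` call, but this is an assumed shape, not GitLab's verbatim code:

  def job_token_policies_enabled?
    strong_memoize(:job_token_policies_enabled?) do
      !!namespace.root_ancestor.namespace_settings&.job_token_policies_enabled?
    end
  end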

View File

@ -997,12 +997,14 @@ RSpec.describe User, feature_category: :user_profile do
expect(user.errors.messages[:email].first).to eq(expected_error)
end
it 'allows example@test.com if user is placeholder or import user' do
it 'allows example@test.com if user is placeholder, import user or security policy bot' do
placeholder_user = build(:user, :placeholder, email: "example@test.com")
import_user = build(:user, :import_user, email: "example@test.com")
security_policy_bot = build(:user, :security_policy_bot, email: "example@test.com")
expect(placeholder_user).to be_valid
expect(import_user).to be_valid
expect(security_policy_bot).to be_valid
end
it 'does not allow user to update email to a non-allowlisted domain' do
@ -1012,12 +1014,14 @@ RSpec.describe User, feature_category: :user_profile do
.to raise_error(StandardError, 'Validation failed: Email is not allowed. Please use your regular email address. Check with your administrator.')
end
it 'allows placeholder and import users to update email to a non-allowlisted domain' do
it 'allows placeholder, import users and security policy bot to update email to a non-allowlisted domain' do
placeholder_user = create(:user, :placeholder, email: "info@test.example.com")
import_user = create(:user, :import_user, email: "info2@test.example.com")
security_policy_bot = create(:user, :security_policy_bot, email: "info3@test.example.com")
expect(placeholder_user.update!(email: "test@notexample.com")).to eq(true)
expect(import_user.update!(email: "test2@notexample.com")).to eq(true)
expect(security_policy_bot.update!(email: "test3@notexample.com")).to eq(true)
end
end
@ -1176,6 +1180,16 @@ RSpec.describe User, feature_category: :user_profile do
expect(user).to be_valid
end
it 'allows placeholder, import users and security policy bot to bypass email restrictions' do
placeholder_user = build(:user, :placeholder, email: "info+1@test.com")
import_user = build(:user, :import_user, email: "info+1@test.com")
security_policy_bot = build(:user, :security_policy_bot, email: "info+1@test.com")
expect(placeholder_user).to be_valid
expect(import_user).to be_valid
expect(security_policy_bot).to be_valid
end
context 'when created_by_id is set' do
it 'does accept the email address' do
user = build(:user, email: 'info+1@test.com', created_by_id: 1)
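The same trio (placeholder users, import users, and the security policy bot) now bypasses all three email checks in this file: the domain allowlist, allowlist updates, and the `+`-restriction here. A plausible shape for the shared escape hatch, with hypothetical method names:

  # Hypothetical guard; the real validator in app/models/user.rb may differ.
  def bypasses_email_restrictions?
    placeholder? || import_user? || security_policy_bot?
  end

  def validate_email_restrictions
    return if bypasses_email_restrictions?

    errors.add(:email, 'is not allowed') if email_restricted?(email)
  end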

View File

@ -169,6 +169,12 @@ RSpec.describe 'Querying CI_JOB_TOKEN allowlist for a project', feature_category
)
end
before do
allow_next_found_instance_of(Project) do |project|
allow(project).to receive(:job_token_policies_enabled?).and_return(true)
end
end
it 'returns the correct data' do
post_graphql(query, current_user: current_user)
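These request specs cannot stub a locally held `project`, because the endpoint loads its own instance from the database. `allow_next_found_instance_of`, a GitLab spec helper defined under `spec/support/helpers`, instead stubs whichever record ActiveRecord materializes next:

  # Applies to the next Project instance ActiveRecord instantiates,
  # i.e. the one the GraphQL request will actually touch:
  allow_next_found_instance_of(Project) do |project|
    allow(project).to receive(:job_token_policies_enabled?).and_return(true)
  end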

View File

@ -85,6 +85,12 @@ RSpec.describe 'CiJobTokenScopeAddGroupOrProject', feature_category: :continuous
end
end
before do
allow_next_found_instance_of(Project) do |project|
allow(project).to receive(:job_token_policies_enabled?).and_return(true)
end
end
context 'when we add a group' do
let_it_be(:target_group) { create(:group, :private) }
let(:target_path) { target_group }

View File

@ -43,6 +43,12 @@ RSpec.describe 'CiJobTokenScopeUpdatePolicies', feature_category: :continuous_in
let(:mutation_response) { graphql_mutation_response(:ci_job_token_scope_update_policies) }
before do
allow_next_found_instance_of(Project) do |project|
allow(project).to receive(:job_token_policies_enabled?).and_return(true)
end
end
context 'when policies are updated for a target project' do
let_it_be(:target_project) { create(:project, :private) }
let_it_be(:target_path) { target_project.full_path }

View File

@ -11,6 +11,10 @@ RSpec.describe Ci::JobTokenScope::AddGroupService, feature_category: :continuous
let(:service) { described_class.new(project, current_user) }
shared_examples 'adds group' do |_context|
before do
allow(project).to receive(:job_token_policies_enabled?).and_return(true)
end
it 'adds the group to the scope', :aggregate_failures do
expect { result }.to change { Ci::JobToken::GroupScopeLink.count }.by(1)

View File

@ -10,6 +10,10 @@ RSpec.describe Ci::JobTokenScope::AddProjectService, feature_category: :continuo
let_it_be(:policies) { %w[read_containers read_packages] }
shared_examples 'adds project' do |context|
before do
allow(project).to receive(:job_token_policies_enabled?).and_return(true)
end
it 'adds the project to the scope', :aggregate_failures do
expect { result }.to change { Ci::JobToken::ProjectScopeLink.count }.by(1)

View File

@ -12,6 +12,10 @@ RSpec.describe Ci::JobTokenScope::UpdatePoliciesService, feature_category: :cont
described_class.new(project, current_user).execute(target, default_permissions, policies)
end
before do
allow(project).to receive(:job_token_policies_enabled?).and_return(true)
end
describe '#execute' do
shared_examples 'when user is not logged in' do
let(:current_user) { nil }

View File

@ -46,8 +46,8 @@ RSpec.describe WorkItems::DataSync::Widgets::Hierarchy, feature_category: :team_
# these are the newly copied child records
new_children = target_work_item.reload.work_item_children.where(moved_to_id: nil)
# these are the originally re-linked child records from source work item that are closed upon move.
moved_children = target_work_item.reload.work_item_children.where.not(moved_to_id: nil)
# these are the originally linked child records on source work item that are closed upon move.
moved_children = work_item.reload.work_item_children.where.not(moved_to_id: nil)
expect(new_children.size).to eq(2)
expect(new_children.map(&:title)).to match_array(expected_child_items_titles)
@ -62,9 +62,6 @@ RSpec.describe WorkItems::DataSync::Widgets::Hierarchy, feature_category: :team_
expect(moved_children.map(&:moved_to_id)).to match_array(new_children.map(&:id))
# new target work item and its 2 child tasks are located within new namespace
expect(target_work_item.namespace.work_items.count).to eq(3)
# child items are relinked in `after_save_commit`
expect(work_item.reload.work_item_children).to be_empty
end
end
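The corrected assertions encode the move semantics: the target gains fresh copies (`moved_to_id: nil`), while the source keeps its original children, closed and stamped with `moved_to_id` pointing at the copies, so they must be read from `work_item` rather than `target_work_item`. In short:

  # Illustrative summary of the assertions above:
  target_work_item.work_item_children.where(moved_to_id: nil)  # fresh copies
  work_item.work_item_children.where.not(moved_to_id: nil)     # closed originals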

View File

@ -8,11 +8,9 @@ RSpec::Matchers.define :be_in_sync_with_project do |project|
break true if project.new_record? && !project_namespace.present?
project_namespace.present? &&
project.name == project_namespace.name &&
project.path == project_namespace.path &&
project.namespace_id == project_namespace.parent_id &&
project.visibility_level == project_namespace.visibility_level &&
project.shared_runners_enabled == project_namespace.shared_runners_enabled
Namespaces::ProjectNamespace::SYNCED_ATTRIBUTES.all? do |attribute|
project[attribute] == project_namespace[attribute]
end
end
failure_message_when_negated do |project_namespace|
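Driving the matcher off `Namespaces::ProjectNamespace::SYNCED_ATTRIBUTES` keeps it honest as the synced set grows (the `organization_id` regression above being a case in point), instead of hard-coding five comparisons that can silently go stale. The constant's exact contents live on the model; a plausible shape, for illustration only:

  # Illustrative guess; see Namespaces::ProjectNamespace for the real list.
  SYNCED_ATTRIBUTES = %w[name path visibility_level shared_runners_enabled organization_id].freeze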

View File

@ -13,12 +13,13 @@ RSpec.shared_examples 'enforcing job token policies' do |policies, expected_succ
end
context 'when authenticating with a CI job token from another project' do
let(:source_project) { project }
let(:source_project) { project.reload }
let(:job_user) { user }
let(:target_job) { create(:ci_build, :running, user: job_user) }
let(:allowed_policies) { Array(policies) }
let(:default_permissions) { false }
let(:skip_allowlist_creation) { false }
let(:job_token_policies_enabled) { true }
let!(:features_state) do
source_project.project_feature.attributes
.slice(*::ProjectFeature::FEATURES.map { |feature| "#{feature}_access_level" })
@ -37,6 +38,10 @@ RSpec.shared_examples 'enforcing job token policies' do |policies, expected_succ
before do
# Make all project features private
enable_project_features(source_project, nil)
# Enable fine-grained job token permissions
namespace_settings = source_project.root_ancestor.namespace_settings ||
source_project.root_ancestor.build_namespace_settings
namespace_settings.update!(job_token_policies_enabled:)
end
after do
@ -56,10 +61,7 @@ RSpec.shared_examples 'enforcing job token policies' do |policies, expected_succ
# This test makes sure that endpoints for which we want to enable job token permissions
# are denied access when an allowlist entry is missing.
let(:allowlist) { nil }
before do
stub_feature_flags(add_policies_to_ci_job_token: false)
end
let(:job_token_policies_enabled) { false }
it 'denies access' do
expect(do_request).to have_gitlab_http_status(:forbidden)
@ -100,11 +102,7 @@ RSpec.shared_examples 'enforcing job token policies' do |policies, expected_succ
end
context 'when job token policies are disabled' do
before do
allow_next_found_instance_of(Project) do |project|
allow(project).to receive(:job_token_policies_enabled?).and_return(false)
end
end
let(:job_token_policies_enabled) { false }
it { is_expected.to have_gitlab_http_status(expected_success_status) }
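A small note on the `before` block above: `update!(job_token_policies_enabled:)` uses Ruby 3.1's hash-literal value omission, so the setting tracks whichever `let(:job_token_policies_enabled)` the including spec defines, and the disabled contexts simply override it to `false`:

  namespace_settings.update!(job_token_policies_enabled:)
  # expands to:
  namespace_settings.update!(job_token_policies_enabled: job_token_policies_enabled)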

View File

@ -240,14 +240,20 @@ RSpec.shared_examples 'cloneable and moveable widget data' do
end
let_it_be(:child_items) do
namespace_params = if original_work_item.project
[project: original_work_item.project]
else
[:group_level, { namespace: original_work_item.namespace }]
end
child_item_type1 = WorkItems::HierarchyRestriction.where(parent_type: original_work_item.work_item_type).order(
id: :asc).first.child_type.base_type
child_item_type2 = WorkItems::HierarchyRestriction.where(parent_type: original_work_item.work_item_type).order(
id: :asc).last.child_type.base_type
child_item1 = create(:work_item, child_item_type1)
child_item1 = create(:work_item, child_item_type1, *namespace_params)
create(:parent_link, work_item: child_item1, work_item_parent: original_work_item)
child_item2 = create(:work_item, child_item_type2)
child_item2 = create(:work_item, child_item_type2, *namespace_params)
create(:parent_link, work_item: child_item2, work_item_parent: original_work_item)
[child_item1, child_item2].pluck(:title)
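Splatting `namespace_params` lets one `create` call cover both container types; depending on where the original work item lives, it expands to one of the following (illustrative):

  # Project-level child:
  create(:work_item, child_item_type, project: original_work_item.project)
  # Group-level child, via the :group_level trait:
  create(:work_item, child_item_type, :group_level, namespace: original_work_item.namespace)

The trailing `pluck(:title)` works on the plain Array via ActiveSupport's `Enumerable#pluck`.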