Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-07-18 12:15:50 +00:00
parent 4e4975548a
commit ce2b1f9fcc
68 changed files with 1030 additions and 175 deletions

View File

@ -3,9 +3,6 @@
*/
export default {
files: [
'app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_comment_form.vue',
'app/assets/javascripts/admin/abuse_report/components/notes/abuse_report_edit_note.vue',
'app/assets/javascripts/admin/statistics_panel/components/app.vue',
'app/assets/javascripts/batch_comments/components/draft_note.vue',
'app/assets/javascripts/batch_comments/components/preview_item.vue',
'app/assets/javascripts/behaviors/components/json_table.vue',

View File

@ -45,8 +45,9 @@
|| $FORCE_GITLAB_CI
'
# weekly pipeline is opt-in and should not automatically run every job
.if-default-branch-refs: &if-default-branch-refs
if: '$CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH && $CI_MERGE_REQUEST_IID == null'
if: '$CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH && $CI_MERGE_REQUEST_IID == null && $SCHEDULE_TYPE != "weekly"'
# This rule ensures the job runs for push pipeline events on stable branches.
# This is used for ensuring jobs run in a pipeline triggered by mirror sync.

View File

@ -3284,7 +3284,6 @@ Gitlab/BoundedContexts:
- 'ee/app/services/llm/internal/completion_service.rb'
- 'ee/app/services/llm/merge_requests/summarize_review_service.rb'
- 'ee/app/services/llm/notes/measure_comment_temperature_service.rb'
- 'ee/app/services/llm/product_analytics/generate_cube_query_service.rb'
- 'ee/app/services/llm/resolve_vulnerability_service.rb'
- 'ee/app/services/llm/review_merge_request_service.rb'
- 'ee/app/services/llm/summarize_new_merge_request_service.rb'

View File

@ -636,7 +636,7 @@ gem 'ssh_data', '~> 1.3', feature_category: :shared
gem 'spamcheck', '~> 1.3.0', feature_category: :insider_threat
# Gitaly GRPC protocol definitions
gem 'gitaly', '~> 18.1.0.pre.rc1', feature_category: :gitaly
gem 'gitaly', '~> 18.2.0', feature_category: :gitaly
# KAS GRPC protocol definitions
gem 'gitlab-kas-grpc', '~> 18.1.0', feature_category: :deployment_management

View File

@ -215,7 +215,7 @@
{"name":"gettext","version":"3.5.1","platform":"ruby","checksum":"03ec7f71ea7e2cf1fdcd5e08682e98b81601922fdbee890b7bc6f63b0e1a512a"},
{"name":"gettext_i18n_rails","version":"1.13.0","platform":"ruby","checksum":"d4a4739d928b6ce52a2d694d33a831dcb06c7c8e197b3172fc73dfaa20ac8ee6"},
{"name":"git","version":"1.19.1","platform":"ruby","checksum":"b0a422d9f6517353c48a330d6114de4db9e0c82dbe7202964a1d9f1fbc827d70"},
{"name":"gitaly","version":"18.1.0.pre.rc1","platform":"ruby","checksum":"8f65a0c5bb3694c91c9fa4bfa7ceabfc131846b78feed8ee32a744aaacf6e70a"},
{"name":"gitaly","version":"18.2.0","platform":"ruby","checksum":"229010b9e8a9e8de213591989795df17a3bdcc01957903bd2a2f1474bbff5578"},
{"name":"gitlab","version":"4.19.0","platform":"ruby","checksum":"3f645e3e195dbc24f0834fbf83e8ccfb2056d8e9712b01a640aad418a6949679"},
{"name":"gitlab-chronic","version":"0.10.6","platform":"ruby","checksum":"a244d11a1396d2aac6ae9b2f326adf1605ec1ad20c29f06e8b672047d415a9ac"},
{"name":"gitlab-cloud-connector","version":"1.21.0","platform":"ruby","checksum":"16eb2a42f223c6c70efc20a8fd9e2bbe4fa91603894daa8a72f354f425a07709"},

View File

@ -728,7 +728,7 @@ GEM
git (1.19.1)
addressable (~> 2.8)
rchardet (~> 1.8)
gitaly (18.1.0.pre.rc1)
gitaly (18.2.0)
grpc (~> 1.0)
gitlab (4.19.0)
httparty (~> 0.20)
@ -2149,7 +2149,7 @@ DEPENDENCIES
gettext (~> 3.5, >= 3.5.1)
gettext_i18n_rails (~> 1.13.0)
git (~> 1.8)
gitaly (~> 18.1.0.pre.rc1)
gitaly (~> 18.2.0)
gitlab-active-context!
gitlab-backup-cli!
gitlab-chronic (~> 0.10.5)

View File

@ -215,7 +215,7 @@
{"name":"gettext","version":"3.5.1","platform":"ruby","checksum":"03ec7f71ea7e2cf1fdcd5e08682e98b81601922fdbee890b7bc6f63b0e1a512a"},
{"name":"gettext_i18n_rails","version":"1.13.0","platform":"ruby","checksum":"d4a4739d928b6ce52a2d694d33a831dcb06c7c8e197b3172fc73dfaa20ac8ee6"},
{"name":"git","version":"1.19.1","platform":"ruby","checksum":"b0a422d9f6517353c48a330d6114de4db9e0c82dbe7202964a1d9f1fbc827d70"},
{"name":"gitaly","version":"18.1.0.pre.rc1","platform":"ruby","checksum":"8f65a0c5bb3694c91c9fa4bfa7ceabfc131846b78feed8ee32a744aaacf6e70a"},
{"name":"gitaly","version":"18.2.0","platform":"ruby","checksum":"229010b9e8a9e8de213591989795df17a3bdcc01957903bd2a2f1474bbff5578"},
{"name":"gitlab","version":"4.19.0","platform":"ruby","checksum":"3f645e3e195dbc24f0834fbf83e8ccfb2056d8e9712b01a640aad418a6949679"},
{"name":"gitlab-chronic","version":"0.10.6","platform":"ruby","checksum":"a244d11a1396d2aac6ae9b2f326adf1605ec1ad20c29f06e8b672047d415a9ac"},
{"name":"gitlab-cloud-connector","version":"1.21.0","platform":"ruby","checksum":"16eb2a42f223c6c70efc20a8fd9e2bbe4fa91603894daa8a72f354f425a07709"},

View File

@ -722,7 +722,7 @@ GEM
git (1.19.1)
addressable (~> 2.8)
rchardet (~> 1.8)
gitaly (18.1.0.pre.rc1)
gitaly (18.2.0)
grpc (~> 1.0)
gitlab (4.19.0)
httparty (~> 0.20)
@ -2144,7 +2144,7 @@ DEPENDENCIES
gettext (~> 3.5, >= 3.5.1)
gettext_i18n_rails (~> 1.13.0)
git (~> 1.8)
gitaly (~> 18.1.0.pre.rc1)
gitaly (~> 18.2.0)
gitlab-active-context!
gitlab-backup-cli!
gitlab-chronic (~> 0.10.5)

View File

@ -129,7 +129,6 @@ export default {
<div :class="commentFormWrapperClasses" data-testid="abuse-report-comment-form-wrapper">
<abuse-report-comment-form
v-if="isEditing"
:abuse-report-id="abuseReportId"
:is-submitting="isSubmitting"
:autosave-key="autosaveKey"
:comment-button-text="commentButtonText"

View File

@ -22,10 +22,6 @@ export default {
},
inject: ['uploadNoteAttachmentPath'],
props: {
abuseReportId: {
type: String,
required: true,
},
isSubmitting: {
type: Boolean,
required: false,

View File

@ -16,15 +16,6 @@ export default {
AbuseReportCommentForm,
},
props: {
abuseReportId: {
type: String,
required: true,
},
discussionId: {
type: String,
required: false,
default: '',
},
note: {
type: Object,
required: true,
@ -96,7 +87,6 @@ export default {
<div>
<div class="flash-container"></div>
<abuse-report-comment-form
:abuse-report-id="abuseReportId"
:initial-value="note.body"
:is-submitting="isSubmitting"
:autosave-key="autosaveKey"

View File

@ -25,10 +25,6 @@ export default {
EditedAt,
},
props: {
abuseReportId: {
type: String,
required: true,
},
note: {
type: Object,
required: true,
@ -97,7 +93,6 @@ export default {
<div class="timeline-content !gl-pb-4">
<abuse-report-edit-note
v-if="isEditing"
:abuse-report-id="abuseReportId"
:note="updatedNote"
@cancelEditing="cancelEditing"
@updateNote="updateNote"

View File

@ -14,7 +14,7 @@ export default {
};
},
computed: {
...mapState(['isLoading', 'statistics']),
...mapState(['isLoading']),
...mapGetters(['getStatistics']),
},
mounted() {

View File

@ -96,4 +96,5 @@ export const COMMANDS = {
UNASSIGN_REVIEWER: '/unassign_reviewer',
UNLABEL: '/unlabel',
ITERATION: '/iteration',
UNLINK: '/unlink',
};

View File

@ -2,7 +2,7 @@ import { identity, memoize, isEmpty } from 'lodash';
import { initEmojiMap, getAllEmoji, searchEmoji } from '~/emoji';
import { newDate } from '~/lib/utils/datetime_utility';
import axios from '~/lib/utils/axios_utils';
import { currentAssignees } from '~/graphql_shared/issuable_client';
import { currentAssignees, linkedItems } from '~/graphql_shared/issuable_client';
import { COMMANDS } from '../constants';
export function defaultSorter(searchFields) {
@ -200,6 +200,34 @@ export default class AutocompleteHelper {
return true;
}),
/**
* We're overriding returned items instead of filtering out
* irrelevant items because for `/unlink #`, it should show
* all linked items at once without waiting for user to
* manually search items.
*/
issue: (items) => {
let filteredItems = items;
if (command === COMMANDS.UNLINK) {
const { workItemFullPath, workItemIid } =
this.tiptapEditor?.view.dom.closest('.js-gfm-wrapper')?.dataset || {};
if (workItemFullPath && workItemIid) {
const links = linkedItems()[`${workItemFullPath}:${workItemIid}`] || [];
filteredItems = links.map((link) => ({
id: Number(link.iid),
iid: Number(link.iid),
title: link.title,
reference: link.reference,
search: `${link.iid} ${link.title}`,
icon_name: link.workItemType.iconName,
}));
}
}
return filteredItems;
},
emoji: (_, query) =>
query
? searchEmoji(query)

View File

@ -57,7 +57,7 @@ export default {
data() {
return {
eventHub: eventHubByKey(this.queryKey),
crudComponentId: uniqueId('glql-crud-'),
crudComponentId: `glql-${this.queryKey}`,
queryModalSettings: {
id: uniqueId('glql-modal-'),
@ -302,6 +302,7 @@ export default {
:count="data.count"
is-collapsible
:collapsed="isCollapsed"
persist-collapsed-state
class="!gl-mt-5"
:body-class="{ '!gl-m-0 !gl-p-0': data.count || isPreview }"
@collapsed="isCollapsed = true"

View File

@ -69,19 +69,20 @@ export default {
<th-resizable
v-for="(field, fieldIndex) in fields"
:key="field.key"
class="gl-whitespace-nowrap !gl-border-section !gl-bg-subtle !gl-px-5 !gl-py-3 !gl-text-subtle gl-text-subtle dark:!gl-bg-strong"
class="gl-relative !gl-border-section !gl-bg-subtle !gl-p-0 !gl-text-subtle gl-text-subtle dark:!gl-bg-strong"
>
<div
:data-testid="`column-${fieldIndex}`"
class="gl-cursor-pointer"
class="gl-l-0 gl-r-0 gl-absolute gl-w-full gl-cursor-pointer gl-truncate gl-px-5 gl-py-3 gl-transition-colors hover:gl-bg-strong dark:hover:gl-bg-neutral-700"
@click="sorter.sortBy(field.key)"
>
{{ field.label }}
<gl-icon
v-if="sorter.options.fieldName === field.key"
:name="sorter.options.ascending ? 'arrow-up' : 'arrow-down'"
/>
{{ field.label }}
</div>
<div class="gl-pointer-events-none gl-py-3">&nbsp;</div>
</th-resizable>
</tr>
</thead>

View File

@ -10,12 +10,23 @@ const states = {
merged: 3,
};
const statusCategories = {
triage: 1,
to_do: 2,
in_progress: 3,
done: 4,
canceled: 5,
};
const sortFieldsByType = {
Issue: 'title',
Epic: 'title',
Milestone: 'title',
Label: 'title',
UserCore: 'username',
MergeRequestAuthor: 'username',
MergeRequestReviewer: 'username',
MergeRequestAssignee: 'username',
Project: 'nameWithNamespace',
};
function valueByType(field, type) {
@ -28,6 +39,11 @@ function valueByFieldName(fieldValue, fieldName) {
return healthStatuses[fieldValue];
case 'state':
return states[fieldValue];
case 'status':
return statusCategories[fieldValue.category];
case 'milestone':
case 'iteration':
return new Date(fieldValue.dueDate);
default:
return null;
}

View File

@ -250,8 +250,25 @@ export const config = {
const items = resultNodes
.filter((node) => node.workItem)
// normally we would only get a `__ref` for nested properties but we need to extract the full work item
// eslint-disable-next-line no-underscore-dangle
.map((node) => context.cache.extract()[node.workItem.__ref]);
.map((node) => {
/* eslint-disable no-underscore-dangle */
const itemRef = context.cache.extract()[node.workItem.__ref];
const { __typename, id, name, iconName } =
context.cache.extract()[itemRef.workItemType.__ref];
/* eslint-enable no-underscore-dangle */
const workItem = {
...itemRef,
workItemType: {
__typename,
id,
name,
iconName,
},
};
return workItem;
});
// Ensure that any existing linked items are retained
const existingLinkedItems = linkedItems();

View File

@ -144,15 +144,17 @@ export default {
},
},
mounted() {
const localStorageValue = localStorage.getItem(this.getLocalStorageKeyName());
if (this.persistCollapsedState) {
// If collapsed by default and not yet toggled.
if (this.collapsed && localStorage.getItem(this.getLocalStorageKeyName()) === null) {
if (this.collapsed && localStorageValue === null) {
this.isCollapsed = true;
}
if (localStorage.getItem(this.getLocalStorageKeyName()) === 'true') {
if (localStorageValue === 'true') {
this.$emit('collapsed');
} else {
} else if (localStorageValue) {
this.$emit('expanded');
}
}

View File

@ -278,7 +278,12 @@ export default {
);
},
showDraftStatusBadge() {
return Boolean(this.isMergeRequest && this.isOpen && this.issuable.draft);
return Boolean(
this.isMergeRequest &&
this.isOpen &&
this.issuable.draft &&
this.glFeatures.showMergeRequestStatusDraft,
);
},
statusBadgeVariant() {
if (this.isMergeRequest && this.isClosed) {

View File

@ -128,7 +128,9 @@ export default {
},
enableCheckboxes() {
if (this.canUpdate) {
const checkboxes = this.$el.querySelectorAll('.task-list-item-checkbox');
const checkboxes = this.$el.querySelectorAll(
'.task-list-item-checkbox:not([data-inapplicable])',
);
// enable boxes, disabled by default in markdown
checkboxes.forEach((checkbox) => {

View File

@ -53,9 +53,11 @@ export default {
if (!this.hasAdminNotePermission) {
return;
}
this.$el.querySelectorAll('.task-list-item-checkbox').forEach((checkbox) => {
checkbox.disabled = disabled; // eslint-disable-line no-param-reassign
});
this.$el
.querySelectorAll('.task-list-item-checkbox:not([data-inapplicable])')
.forEach((checkbox) => {
checkbox.disabled = disabled; // eslint-disable-line no-param-reassign
});
},
toggleCheckboxes(event) {
if (!this.hasAdminNotePermission) {

View File

@ -219,7 +219,9 @@ export default {
return container.firstChild;
},
initCheckboxes() {
this.checkboxes = this.$el.querySelectorAll('.task-list-item-checkbox');
this.checkboxes = this.$el.querySelectorAll(
'.task-list-item-checkbox:not([data-inapplicable])',
);
// enable boxes, disabled by default in markdown
this.disableCheckboxes(false);

View File

@ -13,7 +13,7 @@ module Projects
before_action :authorize_update_cicd_settings!, only: :update
before_action :authorize_reset_cache!, only: :reset_cache
before_action :check_builds_available!
before_action :define_variables
before_action :define_variables, only: :show
before_action do
push_frontend_feature_flag(:ci_variables_pages, current_user)

View File

@ -478,6 +478,10 @@ class User < ApplicationRecord
delegate :email_reset_offered_at, :email_reset_offered_at=, to: :user_detail, allow_nil: true
delegate :project_authorizations_recalculated_at, :project_authorizations_recalculated_at=, to: :user_detail, allow_nil: true
delegate :bot_namespace, :bot_namespace=, to: :user_detail, allow_nil: true
delegate :email_otp, :email_otp=, to: :user_detail, allow_nil: true
delegate :email_otp_required_after, :email_otp_required_after=, to: :user_detail, allow_nil: true
delegate :email_otp_last_sent_at, :email_otp_last_sent_at=, to: :user_detail, allow_nil: true
delegate :email_otp_last_sent_to, :email_otp_last_sent_to=, to: :user_detail, allow_nil: true
accepts_nested_attributes_for :user_preference, update_only: true
accepts_nested_attributes_for :user_detail, update_only: true

View File

@ -11,6 +11,9 @@ class UserDetail < ApplicationRecord
validates :job_title, length: { maximum: 200 }
validates :bio, length: { maximum: 255 }, allow_blank: true
validates :email_otp, length: { is: 64 }, allow_nil: true
validates :email_otp_last_sent_to, length: { maximum: 511 }, allow_nil: true
validate :bot_namespace_user_type, if: :bot_namespace_id_changed?
ignore_column :registration_objective, remove_after: '2025-07-17', remove_with: '18.2'
@ -82,6 +85,15 @@ class UserDetail < ApplicationRecord
end
end
# Exclude the hashed email_otp attribute
def serializable_hash(options = nil)
options = options.try(:dup) || {}
options[:except] = Array(options[:except]).dup
options[:except].concat [:email_otp]
super
end
private
def prevent_nil_fields

View File

@ -335,12 +335,12 @@ CREATE TABLE hierarchy_work_items
`work_item_type_id` Int64,
`namespace_id` Int64,
`start_date` Nullable(Date32),
`label_ids` Array(Int64) DEFAULT [],
`assignee_ids` Array(Int64) DEFAULT [],
`custom_status_id` Int64,
`system_defined_status_id` Int64,
`version` DateTime64(6, 'UTC') DEFAULT now(),
`deleted` Bool DEFAULT false
`deleted` Bool DEFAULT false,
`label_ids` String DEFAULT '',
`assignee_ids` String DEFAULT ''
)
ENGINE = ReplacingMergeTree(version, deleted)
PRIMARY KEY (traversal_path, work_item_type_id, id)
@ -1263,8 +1263,8 @@ CREATE MATERIALIZED VIEW hierarchy_work_items_mv TO hierarchy_work_items
`work_item_type_id` Int64,
`namespace_id` Int64,
`start_date` Nullable(Date32),
`label_ids` Array(Int64),
`assignee_ids` Array(Int64),
`label_ids` String,
`assignee_ids` String,
`custom_status_id` Int64,
`system_defined_status_id` Int64,
`version` DateTime64(6, 'UTC'),
@ -1298,7 +1298,7 @@ AS WITH
(
SELECT
work_item_id,
arraySort(groupArray(label_id)) AS label_ids
concat('/', arrayStringConcat(arraySort(groupArray(label_id)), '/'), '/') AS label_ids
FROM
(
SELECT
@ -1323,7 +1323,7 @@ AS WITH
(
SELECT
issue_id,
arraySort(groupArray(user_id)) AS user_ids
concat('/', arrayStringConcat(arraySort(groupArray(user_id)), '/'), '/') AS user_ids
FROM
(
SELECT

View File

@ -0,0 +1,127 @@
# frozen_string_literal: true
class RemoveHierarchyWorkItemMv < ClickHouse::Migration
def up
execute <<-SQL
DROP VIEW IF EXISTS hierarchy_work_items_mv
SQL
end
def down
execute <<-SQL
CREATE MATERIALIZED VIEW IF NOT EXISTS hierarchy_work_items_mv TO hierarchy_work_items
AS WITH
cte AS
(
SELECT *
FROM siphon_issues
),
namespace_paths AS
(
-- look up `traversal_path` values
SELECT * FROM (
SELECT
id,
argMax(traversal_path, version) AS traversal_path,
argMax(deleted, version) AS deleted
FROM namespace_traversal_paths
WHERE id IN (
SELECT DISTINCT namespace_id
FROM cte
)
GROUP BY id
) WHERE deleted = false
),
collected_label_ids AS
(
SELECT work_item_id, arraySort(groupArray(label_id)) AS label_ids
FROM (
SELECT
work_item_id,
label_id,
id,
argMax(deleted, version) AS deleted
FROM work_item_label_links
WHERE work_item_id IN (SELECT id FROM cte)
GROUP BY work_item_id, label_id, id
) WHERE deleted = false
GROUP BY work_item_id
),
collected_assignee_ids AS
(
SELECT issue_id, arraySort(groupArray(user_id)) AS user_ids
FROM (
SELECT
issue_id,
user_id,
argMax(_siphon_deleted, _siphon_replicated_at) AS _siphon_deleted
FROM siphon_issue_assignees
WHERE issue_id IN (SELECT id FROM cte)
GROUP BY issue_id, user_id
) WHERE _siphon_deleted = false
GROUP BY issue_id
),
collected_custom_status_records AS
(
SELECT work_item_id, max(system_defined_status_id) AS system_defined_status_id, max(custom_status_id) AS custom_status_id
FROM (
SELECT
work_item_id,
id,
argMax(system_defined_status_id, _siphon_replicated_at) AS system_defined_status_id,
argMax(custom_status_id, _siphon_replicated_at) AS custom_status_id,
argMax(_siphon_deleted, _siphon_replicated_at) AS _siphon_deleted
FROM siphon_work_item_current_statuses
GROUP BY work_item_id, id
) WHERE _siphon_deleted = false
GROUP BY work_item_id
),
finalized AS
(
SELECT
-- handle the case where namespace_id is null
multiIf(cte.namespace_id != 0, namespace_paths.traversal_path, '0/') AS traversal_path,
cte.id AS id,
cte.title,
cte.author_id,
cte.created_at,
cte.updated_at,
cte.milestone_id,
cte.iid,
cte.updated_by_id,
cte.weight,
cte.confidential,
cte.due_date,
cte.moved_to_id,
cte.time_estimate,
cte.relative_position,
cte.last_edited_at,
cte.last_edited_by_id,
cte.closed_at,
cte.closed_by_id,
cte.state_id,
cte.duplicated_to_id,
cte.promoted_to_epic_id,
cte.health_status,
cte.sprint_id,
cte.blocking_issues_count,
cte.upvotes_count,
cte.work_item_type_id,
cte.namespace_id,
cte.start_date,
collected_label_ids.label_ids AS label_ids,
collected_assignee_ids.user_ids AS assignee_ids,
collected_custom_status_records.custom_status_id AS custom_status_id,
collected_custom_status_records.system_defined_status_id AS system_defined_status_id,
cte._siphon_replicated_at AS version,
cte._siphon_deleted AS deleted
FROM cte
LEFT JOIN namespace_paths ON namespace_paths.id = cte.namespace_id
LEFT JOIN collected_assignee_ids ON collected_assignee_ids.issue_id = cte.id
LEFT JOIN collected_label_ids ON collected_label_ids.work_item_id = cte.id
LEFT JOIN collected_custom_status_records ON collected_custom_status_records.work_item_id = cte.id
)
SELECT * FROM finalized
SQL
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AdjustHierarchyWorkItemTable < ClickHouse::Migration
def up
execute 'ALTER TABLE hierarchy_work_items DROP COLUMN label_ids'
execute 'ALTER TABLE hierarchy_work_items DROP COLUMN assignee_ids'
execute "ALTER TABLE hierarchy_work_items ADD COLUMN label_ids String DEFAULT ''"
execute "ALTER TABLE hierarchy_work_items ADD COLUMN assignee_ids String DEFAULT ''"
end
def down
execute 'ALTER TABLE hierarchy_work_items DROP COLUMN label_ids'
execute 'ALTER TABLE hierarchy_work_items DROP COLUMN assignee_ids'
execute 'ALTER TABLE hierarchy_work_items ADD COLUMN label_ids Array(Int64) DEFAULT []'
execute 'ALTER TABLE hierarchy_work_items ADD COLUMN assignee_ids Array(Int64) DEFAULT []'
end
end

View File

@ -0,0 +1,127 @@
# frozen_string_literal: true
class ReAddWorkItemHierarchyMv < ClickHouse::Migration
def up
execute <<-SQL
CREATE MATERIALIZED VIEW IF NOT EXISTS hierarchy_work_items_mv TO hierarchy_work_items
AS WITH
cte AS
(
SELECT *
FROM siphon_issues
),
namespace_paths AS
(
-- look up `traversal_path` values
SELECT * FROM (
SELECT
id,
argMax(traversal_path, version) AS traversal_path,
argMax(deleted, version) AS deleted
FROM namespace_traversal_paths
WHERE id IN (
SELECT DISTINCT namespace_id
FROM cte
)
GROUP BY id
) WHERE deleted = false
),
collected_label_ids AS
(
SELECT work_item_id, concat('/', arrayStringConcat(arraySort(groupArray(label_id)), '/'), '/') AS label_ids
FROM (
SELECT
work_item_id,
label_id,
id,
argMax(deleted, version) AS deleted
FROM work_item_label_links
WHERE work_item_id IN (SELECT id FROM cte)
GROUP BY work_item_id, label_id, id
) WHERE deleted = false
GROUP BY work_item_id
),
collected_assignee_ids AS
(
SELECT issue_id, concat('/', arrayStringConcat(arraySort(groupArray(user_id)), '/'), '/') AS user_ids
FROM (
SELECT
issue_id,
user_id,
argMax(_siphon_deleted, _siphon_replicated_at) AS _siphon_deleted
FROM siphon_issue_assignees
WHERE issue_id IN (SELECT id FROM cte)
GROUP BY issue_id, user_id
) WHERE _siphon_deleted = false
GROUP BY issue_id
),
collected_custom_status_records AS
(
SELECT work_item_id, max(system_defined_status_id) AS system_defined_status_id, max(custom_status_id) AS custom_status_id
FROM (
SELECT
work_item_id,
id,
argMax(system_defined_status_id, _siphon_replicated_at) AS system_defined_status_id,
argMax(custom_status_id, _siphon_replicated_at) AS custom_status_id,
argMax(_siphon_deleted, _siphon_replicated_at) AS _siphon_deleted
FROM siphon_work_item_current_statuses
GROUP BY work_item_id, id
) WHERE _siphon_deleted = false
GROUP BY work_item_id
),
finalized AS
(
SELECT
-- handle the case where namespace_id is null
multiIf(cte.namespace_id != 0, namespace_paths.traversal_path, '0/') AS traversal_path,
cte.id AS id,
cte.title,
cte.author_id,
cte.created_at,
cte.updated_at,
cte.milestone_id,
cte.iid,
cte.updated_by_id,
cte.weight,
cte.confidential,
cte.due_date,
cte.moved_to_id,
cte.time_estimate,
cte.relative_position,
cte.last_edited_at,
cte.last_edited_by_id,
cte.closed_at,
cte.closed_by_id,
cte.state_id,
cte.duplicated_to_id,
cte.promoted_to_epic_id,
cte.health_status,
cte.sprint_id,
cte.blocking_issues_count,
cte.upvotes_count,
cte.work_item_type_id,
cte.namespace_id,
cte.start_date,
collected_label_ids.label_ids AS label_ids,
collected_assignee_ids.user_ids AS assignee_ids,
collected_custom_status_records.custom_status_id AS custom_status_id,
collected_custom_status_records.system_defined_status_id AS system_defined_status_id,
cte._siphon_replicated_at AS version,
cte._siphon_deleted AS deleted
FROM cte
LEFT JOIN namespace_paths ON namespace_paths.id = cte.namespace_id
LEFT JOIN collected_assignee_ids ON collected_assignee_ids.issue_id = cte.id
LEFT JOIN collected_label_ids ON collected_label_ids.work_item_id = cte.id
LEFT JOIN collected_custom_status_records ON collected_custom_status_records.work_item_id = cte.id
)
SELECT * FROM finalized
SQL
end
def down
execute <<-SQL
DROP VIEW IF EXISTS hierarchy_work_items_mv
SQL
end
end

View File

@ -0,0 +1 @@
b8bc16a34499e5324c63cd8d7e2c8602ded1275e6d9986f04b1dffc66088af5e

View File

@ -0,0 +1 @@
0a43a27e3f3341bd203585ba655c4539fbb1339fb8045d8064ec5d467a2b6a02

View File

@ -0,0 +1 @@
36805dbb7d0c0a1b5558b62dca256d6317a36c50b3ae4af837eefcabc0978946

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
class AddEmailOtpAttributesToUserDetails < Gitlab::Database::Migration[2.3]
milestone '18.3'
disable_ddl_transaction!
def up
with_lock_retries do
# Add a comment to signal the limit will need to be increased if
# the hashing algorithm is changed.
add_column :user_details, :email_otp, :text, if_not_exists: true, comment: 'SHA256 hash (64 hex characters)'
add_column :user_details, :email_otp_last_sent_to, :text, if_not_exists: true
add_column :user_details, :email_otp_last_sent_at, :datetime_with_timezone, if_not_exists: true
add_column :user_details, :email_otp_required_after, :datetime_with_timezone, if_not_exists: true
end
add_text_limit :user_details, :email_otp, 64
add_text_limit :user_details, :email_otp_last_sent_to, 511
end
def down
remove_text_limit :user_details, :email_otp_last_sent_to
remove_text_limit :user_details, :email_otp
with_lock_retries do
remove_column :user_details, :email_otp_required_after
remove_column :user_details, :email_otp_last_sent_at
remove_column :user_details, :email_otp_last_sent_to
remove_column :user_details, :email_otp
end
end
end

View File

@ -0,0 +1,95 @@
# frozen_string_literal: true
class UpdateInsertVulnerabilityReadsFunction < Gitlab::Database::Migration[2.3]
milestone '18.3'
def up
execute <<~SQL
CREATE OR REPLACE FUNCTION insert_vulnerability_reads_from_vulnerability() RETURNS trigger
LANGUAGE plpgsql
AS $$
DECLARE
scanner_id bigint;
uuid uuid;
location_image text;
cluster_agent_id text;
casted_cluster_agent_id bigint;
has_issues boolean;
has_merge_request boolean;
BEGIN
IF (SELECT current_setting('vulnerability_management.dont_execute_db_trigger', true) = 'true') THEN
RETURN NULL;
END IF;
SELECT
v_o.scanner_id, v_o.uuid, v_o.location->>'image', v_o.location->'kubernetes_resource'->>'agent_id', CAST(v_o.location->'kubernetes_resource'->>'agent_id' AS bigint)
INTO
scanner_id, uuid, location_image, cluster_agent_id, casted_cluster_agent_id
FROM
vulnerability_occurrences v_o
WHERE
v_o.vulnerability_id = NEW.id
LIMIT 1;
SELECT
EXISTS (SELECT 1 FROM vulnerability_issue_links WHERE vulnerability_issue_links.vulnerability_id = NEW.id)
INTO
has_issues;
SELECT
EXISTS (SELECT 1 FROM vulnerability_merge_request_links WHERE vulnerability_merge_request_links.vulnerability_id = NEW.id)
INTO
has_merge_request;
INSERT INTO vulnerability_reads (vulnerability_id, project_id, scanner_id, report_type, severity, state, resolved_on_default_branch, uuid, location_image, cluster_agent_id, casted_cluster_agent_id, has_issues, has_merge_request)
VALUES (NEW.id, NEW.project_id, scanner_id, NEW.report_type, NEW.severity, NEW.state, NEW.resolved_on_default_branch, uuid::uuid, location_image, cluster_agent_id, casted_cluster_agent_id, has_issues, has_merge_request)
ON CONFLICT(vulnerability_id) DO NOTHING;
RETURN NULL;
END
$$;
SQL
end
def down
execute <<~SQL
CREATE OR REPLACE FUNCTION insert_vulnerability_reads_from_vulnerability() RETURNS trigger
LANGUAGE plpgsql
AS $$
DECLARE
scanner_id bigint;
uuid uuid;
location_image text;
cluster_agent_id text;
casted_cluster_agent_id bigint;
has_issues boolean;
has_merge_request boolean;
BEGIN
SELECT
v_o.scanner_id, v_o.uuid, v_o.location->>'image', v_o.location->'kubernetes_resource'->>'agent_id', CAST(v_o.location->'kubernetes_resource'->>'agent_id' AS bigint)
INTO
scanner_id, uuid, location_image, cluster_agent_id, casted_cluster_agent_id
FROM
vulnerability_occurrences v_o
WHERE
v_o.vulnerability_id = NEW.id
LIMIT 1;
SELECT
EXISTS (SELECT 1 FROM vulnerability_issue_links WHERE vulnerability_issue_links.vulnerability_id = NEW.id)
INTO
has_issues;
SELECT
EXISTS (SELECT 1 FROM vulnerability_merge_request_links WHERE vulnerability_merge_request_links.vulnerability_id = NEW.id)
INTO
has_merge_request;
INSERT INTO vulnerability_reads (vulnerability_id, project_id, scanner_id, report_type, severity, state, resolved_on_default_branch, uuid, location_image, cluster_agent_id, casted_cluster_agent_id, has_issues, has_merge_request)
VALUES (NEW.id, NEW.project_id, scanner_id, NEW.report_type, NEW.severity, NEW.state, NEW.resolved_on_default_branch, uuid::uuid, location_image, cluster_agent_id, casted_cluster_agent_id, has_issues, has_merge_request)
ON CONFLICT(vulnerability_id) DO NOTHING;
RETURN NULL;
END
$$;
SQL
end
end

View File

@ -0,0 +1,117 @@
# frozen_string_literal: true
class UpdateInsertOrUpdateVulnerabilityReadsFunction < Gitlab::Database::Migration[2.3]
milestone '18.3'
def up
execute <<~SQL
CREATE OR REPLACE FUNCTION insert_or_update_vulnerability_reads() RETURNS trigger
LANGUAGE plpgsql
AS $$
DECLARE
severity smallint;
state smallint;
report_type smallint;
resolved_on_default_branch boolean;
present_on_default_branch boolean;
has_issues boolean;
has_merge_request boolean;
BEGIN
IF (SELECT current_setting('vulnerability_management.dont_execute_db_trigger', true) = 'true') THEN
RETURN NULL;
END IF;
IF (NEW.vulnerability_id IS NULL AND (TG_OP = 'INSERT' OR TG_OP = 'UPDATE')) THEN
RETURN NULL;
END IF;
IF (TG_OP = 'UPDATE' AND OLD.vulnerability_id IS NOT NULL AND NEW.vulnerability_id IS NOT NULL) THEN
RETURN NULL;
END IF;
SELECT
vulnerabilities.severity, vulnerabilities.state, vulnerabilities.report_type, vulnerabilities.resolved_on_default_branch, vulnerabilities.present_on_default_branch
INTO
severity, state, report_type, resolved_on_default_branch, present_on_default_branch
FROM
vulnerabilities
WHERE
vulnerabilities.id = NEW.vulnerability_id;
IF present_on_default_branch IS NOT true THEN
RETURN NULL;
END IF;
SELECT
EXISTS (SELECT 1 FROM vulnerability_issue_links WHERE vulnerability_issue_links.vulnerability_id = NEW.vulnerability_id)
INTO
has_issues;
SELECT
EXISTS (SELECT 1 FROM vulnerability_merge_request_links WHERE vulnerability_merge_request_links.vulnerability_id = NEW.vulnerability_id)
INTO
has_merge_request;
INSERT INTO vulnerability_reads (vulnerability_id, project_id, scanner_id, report_type, severity, state, resolved_on_default_branch, uuid, location_image, cluster_agent_id, casted_cluster_agent_id, has_issues, has_merge_request)
VALUES (NEW.vulnerability_id, NEW.project_id, NEW.scanner_id, report_type, severity, state, resolved_on_default_branch, NEW.uuid::uuid, NEW.location->>'image', NEW.location->'kubernetes_resource'->>'agent_id', CAST(NEW.location->'kubernetes_resource'->>'agent_id' AS bigint), has_issues, has_merge_request)
ON CONFLICT(vulnerability_id) DO NOTHING;
RETURN NULL;
END
$$;
SQL
end
def down
execute <<~SQL
CREATE OR REPLACE FUNCTION insert_or_update_vulnerability_reads() RETURNS trigger
LANGUAGE plpgsql
AS $$
DECLARE
severity smallint;
state smallint;
report_type smallint;
resolved_on_default_branch boolean;
present_on_default_branch boolean;
has_issues boolean;
has_merge_request boolean;
BEGIN
IF (NEW.vulnerability_id IS NULL AND (TG_OP = 'INSERT' OR TG_OP = 'UPDATE')) THEN
RETURN NULL;
END IF;
IF (TG_OP = 'UPDATE' AND OLD.vulnerability_id IS NOT NULL AND NEW.vulnerability_id IS NOT NULL) THEN
RETURN NULL;
END IF;
SELECT
vulnerabilities.severity, vulnerabilities.state, vulnerabilities.report_type, vulnerabilities.resolved_on_default_branch, vulnerabilities.present_on_default_branch
INTO
severity, state, report_type, resolved_on_default_branch, present_on_default_branch
FROM
vulnerabilities
WHERE
vulnerabilities.id = NEW.vulnerability_id;
IF present_on_default_branch IS NOT true THEN
RETURN NULL;
END IF;
SELECT
EXISTS (SELECT 1 FROM vulnerability_issue_links WHERE vulnerability_issue_links.vulnerability_id = NEW.vulnerability_id)
INTO
has_issues;
SELECT
EXISTS (SELECT 1 FROM vulnerability_merge_request_links WHERE vulnerability_merge_request_links.vulnerability_id = NEW.vulnerability_id)
INTO
has_merge_request;
INSERT INTO vulnerability_reads (vulnerability_id, project_id, scanner_id, report_type, severity, state, resolved_on_default_branch, uuid, location_image, cluster_agent_id, casted_cluster_agent_id, has_issues, has_merge_request)
VALUES (NEW.vulnerability_id, NEW.project_id, NEW.scanner_id, report_type, severity, state, resolved_on_default_branch, NEW.uuid::uuid, NEW.location->>'image', NEW.location->'kubernetes_resource'->>'agent_id', CAST(NEW.location->'kubernetes_resource'->>'agent_id' AS bigint), has_issues, has_merge_request)
ON CONFLICT(vulnerability_id) DO NOTHING;
RETURN NULL;
END
$$;
SQL
end
end

View File

@ -0,0 +1 @@
b144d520342fe93eec5ff244e0b20ff94a5f2c0309d050f0bf568feeffd52c5a

View File

@ -0,0 +1 @@
171b688f2fdc186b86c798aa982bd9aea313f84e4303878a2b420e1a03d7b65a

View File

@ -0,0 +1 @@
88003b5383ec342db49271677ff43c5563988f8cfc8dc2c7a8db8d636573da1f

View File

@ -628,6 +628,10 @@ DECLARE
has_issues boolean;
has_merge_request boolean;
BEGIN
IF (SELECT current_setting('vulnerability_management.dont_execute_db_trigger', true) = 'true') THEN
RETURN NULL;
END IF;
IF (NEW.vulnerability_id IS NULL AND (TG_OP = 'INSERT' OR TG_OP = 'UPDATE')) THEN
RETURN NULL;
END IF;
@ -689,6 +693,10 @@ DECLARE
has_issues boolean;
has_merge_request boolean;
BEGIN
IF (SELECT current_setting('vulnerability_management.dont_execute_db_trigger', true) = 'true') THEN
RETURN NULL;
END IF;
SELECT
v_o.scanner_id, v_o.uuid, v_o.location->>'image', v_o.location->'kubernetes_resource'->>'agent_id', CAST(v_o.location->'kubernetes_resource'->>'agent_id' AS bigint)
INTO
@ -24871,10 +24879,15 @@ CREATE TABLE user_details (
bot_namespace_id bigint,
orcid text DEFAULT ''::text NOT NULL,
github text DEFAULT ''::text NOT NULL,
email_otp text,
email_otp_last_sent_to text,
email_otp_last_sent_at timestamp with time zone,
email_otp_required_after timestamp with time zone,
CONSTRAINT check_18a53381cd CHECK ((char_length(bluesky) <= 256)),
CONSTRAINT check_245664af82 CHECK ((char_length(webauthn_xid) <= 100)),
CONSTRAINT check_444573ee52 CHECK ((char_length(skype) <= 500)),
CONSTRAINT check_466a25be35 CHECK ((char_length(twitter) <= 500)),
CONSTRAINT check_4925cf9fd2 CHECK ((char_length(email_otp_last_sent_to) <= 511)),
CONSTRAINT check_4ef1de1a15 CHECK ((char_length(discord) <= 500)),
CONSTRAINT check_7b246dad73 CHECK ((char_length(organization) <= 500)),
CONSTRAINT check_7d6489f8f3 CHECK ((char_length(linkedin) <= 500)),
@ -24883,6 +24896,7 @@ CREATE TABLE user_details (
CONSTRAINT check_99b0365865 CHECK ((char_length(orcid) <= 256)),
CONSTRAINT check_a73b398c60 CHECK ((char_length(phone) <= 50)),
CONSTRAINT check_bbe110f371 CHECK ((char_length(github) <= 500)),
CONSTRAINT check_ec514a06ad CHECK ((char_length(email_otp) <= 64)),
CONSTRAINT check_eeeaf8d4f0 CHECK ((char_length(pronouns) <= 50)),
CONSTRAINT check_f1a8a05b9a CHECK ((char_length(mastodon) <= 500)),
CONSTRAINT check_f932ed37db CHECK ((char_length(pronunciation) <= 255))
@ -24892,6 +24906,8 @@ COMMENT ON COLUMN user_details.phone IS 'JiHu-specific column';
COMMENT ON COLUMN user_details.password_last_changed_at IS 'JiHu-specific column';
COMMENT ON COLUMN user_details.email_otp IS 'SHA256 hash (64 hex characters)';
CREATE SEQUENCE user_details_user_id_seq
START WITH 1
INCREMENT BY 1

View File

@ -2168,7 +2168,6 @@ Input type: `AiActionInput`
| <a id="mutationaiactiondescriptioncomposer"></a>`descriptionComposer` | [`AiDescriptionComposerInput`](#aidescriptioncomposerinput) | Input for description_composer AI action. |
| <a id="mutationaiactionexplainvulnerability"></a>`explainVulnerability` | [`AiExplainVulnerabilityInput`](#aiexplainvulnerabilityinput) | Input for explain_vulnerability AI action. |
| <a id="mutationaiactiongeneratecommitmessage"></a>`generateCommitMessage` | [`AiGenerateCommitMessageInput`](#aigeneratecommitmessageinput) | Input for generate_commit_message AI action. |
| <a id="mutationaiactiongeneratecubequery"></a>`generateCubeQuery` | [`AiGenerateCubeQueryInput`](#aigeneratecubequeryinput) | Input for generate_cube_query AI action. |
| <a id="mutationaiactiongeneratedescription"></a>`generateDescription` | [`AiGenerateDescriptionInput`](#aigeneratedescriptioninput) | Input for generate_description AI action. |
| <a id="mutationaiactionmeasurecommenttemperature"></a>`measureCommentTemperature` | [`AiMeasureCommentTemperatureInput`](#aimeasurecommenttemperatureinput) | Input for measure_comment_temperature AI action. |
| <a id="mutationaiactionplatformorigin"></a>`platformOrigin` | [`String`](#string) | Specifies the origin platform of the request. |
@ -51966,15 +51965,6 @@ see the associated mutation type above.
| ---- | ---- | ----------- |
| <a id="aigeneratecommitmessageinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |
### `AiGenerateCubeQueryInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="aigeneratecubequeryinputquestion"></a>`question` | [`String!`](#string) | Question to ask a project's data. |
| <a id="aigeneratecubequeryinputresourceid"></a>`resourceId` | [`AiModelID!`](#aimodelid) | Global ID of the resource to mutate. |
### `AiGenerateDescriptionInput`
#### Arguments

View File

@ -53,7 +53,8 @@ Supported attributes:
Example request:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/:id/related_epic_links"
curl --header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/groups/:id/related_epic_links"
```
Example response:
@ -171,7 +172,8 @@ Supported attributes:
Example request:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/:id/epics/:epic_iid/related_epics"
curl --header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/groups/:id/epics/:epic_iid/related_epics"
```
Example response:
@ -256,7 +258,9 @@ Supported attributes:
Example request:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/26/epics/1/related_epics?target_group_id=26&target_epic_iid=5"
curl --request POST \
--header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/groups/26/epics/1/related_epics?target_group_id=26&target_epic_iid=5"
```
Example response:
@ -379,7 +383,9 @@ Supported attributes:
Example request:
```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/26/epics/1/related_epics/1"
curl --request DELETE \
--header "PRIVATE-TOKEN: <your_access_token>" \
--url "https://gitlab.example.com/api/v4/groups/26/epics/1/related_epics/1"
```
Example response:

View File

@ -336,3 +336,53 @@ For more information, see:
- [Functional patterns](https://gitlab.com/gitlab-org/gitlab/-/tree/master/ee/lib/remote_development#functional-patterns)
- [Railway-oriented programming and the `Result` class](https://gitlab.com/gitlab-org/gitlab/-/tree/master/ee/lib/remote_development#railway-oriented-programming-and-the-result-class)
## ApplicationRecord / ActiveRecord model scopes
When creating a new scope, consider the following prefixes.
### `for_`
For scopes which filter `where(belongs_to: record)`.
For example:
```ruby
scope :for_project, ->(project) { where(project: project) }
Timelogs.for_project(project)
```
### `with_`
For scopes which use `joins` or `includes`, or which filter `where(has_one: record)`, `where(has_many: record)`, or `where(boolean condition)`.
For example:
```ruby
scope :with_labels, -> { includes(:labels) }
AbuseReport.with_labels
scope :with_status, ->(status) { where(status: status) }
Clusters::AgentToken.with_status(:active)
scope :with_due_date, -> { where.not(due_date: nil) }
Issue.with_due_date
```
It is also fine to use custom scope names, for example:
```ruby
scope :undeleted, -> { where('policy_index >= 0') }
Security::Policy.undeleted
```
### `order_by_`
For scopes which `order`.
For example:
```ruby
scope :order_by_name, -> { order(:name) }
Namespace.order_by_name
scope :order_by_updated_at, ->(direction = :asc) { order(updated_at: direction) }
Project.order_by_updated_at(:desc)
```

View File

@ -70,6 +70,7 @@ Prerequisites:
To use Agentic Chat in the GitLab UI:
1. Go to a project in a group that meets the prerequisites.
1. In the upper-right corner, select **GitLab Duo Chat**. A drawer opens on the right side of your screen.
1. Under the chat text box, turn on the **Agentic mode (Beta)** toggle.
1. Enter your question in the chat text box and press <kbd>Enter</kbd> or select **Send**. It may take a few seconds for the interactive AI chat to produce an answer.

View File

@ -4,11 +4,13 @@ module Gitlab
module ApplicationRateLimiter
class IncrementResourceUsagePerAction < BaseStrategy
def initialize(key)
@usage = ::Gitlab::SafeRequestStore[key].to_f
@usage = ::Gitlab::SafeRequestStore[key.to_sym].to_f
end
def increment(cache_key, expiry)
if Feature.enabled?(:optimize_rate_limiter_redis_expiry, :instance)
return 0 if @usage == 0
with_redis do |redis|
new_value = redis.incrbyfloat(cache_key, @usage)

View File

@ -6,6 +6,7 @@ module Gitlab
module Security
class Finding
include ::VulnerabilityFindingHelpers
include Gitlab::Utils::StrongMemoize
attr_reader :confidence
attr_reader :identifiers
@ -187,6 +188,16 @@ module Gitlab
location_fingerprints.first
end
# OWASP Top 10 category derived from this finding's identifiers
# (falls back to OWASP_TOP_10_DEFAULT when no 'owasp' identifier matches);
# memoized via strong_memoize_attr since the extraction is pure.
def owasp_top_10
extract_owasp_top_10
end
strong_memoize_attr :owasp_top_10

# True when the finding's report type and CWE identifier make it eligible
# for AI vulnerability resolution; memoized via strong_memoize_attr.
def has_vulnerability_resolution?
extract_vulnerability_resolution
end
strong_memoize_attr :has_vulnerability_resolution?
private
def location_fingerprints
@ -199,6 +210,45 @@ module Gitlab
signatures.sort_by { |sig| -sig.priority }.map(&:signature_hex)
end
# Locates the finding's OWASP identifier (case-insensitive external_type
# match) and maps its external_id to an OWASP Top 10 category key.
# Returns OWASP_TOP_10_DEFAULT when no such identifier is present.
def extract_owasp_top_10
identifier = identifiers.find { |candidate| candidate.external_type.casecmp?('owasp') }

if identifier
map_owasp_external_id(identifier.external_id)
else
::Vulnerabilities::Read::OWASP_TOP_10_DEFAULT
end
end
# Maps an OWASP identifier external_id to the matching owasp_top_10 enum
# key; returns OWASP_TOP_10_DEFAULT for unrecognized or malformed ids.
def map_owasp_external_id(external_id)
fallback = ::Vulnerabilities::Read::OWASP_TOP_10_DEFAULT
return fallback unless valid_owasp_external_id?(external_id)

matched = ::Enums::Vulnerability.owasp_top_10.keys.find { |category| category.include?(external_id) }
matched || fallback
end
# Validates an OWASP external_id of the form "<category>:<year...>":
# the 4-character year prefix must be a known OWASP year and the category
# label must be a known OWASP category.
def valid_owasp_external_id?(external_id)
segments = external_id.split(':')
category_label = segments[0]
year_segment = segments[1]
year = year_segment && year_segment[0..3]
return false if year.nil? || ::Enums::Vulnerability.owasp_years.exclude?(year)

Enums::Vulnerability.owasp_categories.include?(category_label)
end
# A finding supports AI resolution when its report type is in
# AI_ALLOWED_REPORT_TYPES and its CWE identifier's (upcased) name is in
# HIGH_CONFIDENCE_AI_RESOLUTION_CWES. Returns false when no CWE identifier
# (or no CWE name) is present.
def extract_vulnerability_resolution
cwe = identifiers.find { |identifier| identifier.external_type == 'cwe' }
cwe_name = cwe&.name
return false unless cwe_name

::Vulnerabilities::Finding::AI_ALLOWED_REPORT_TYPES.include?(report_type.to_s) &&
::Vulnerabilities::Finding::HIGH_CONFIDENCE_AI_RESOLUTION_CWES.include?(cwe_name.upcase)
end
end
end
end

View File

@ -34,6 +34,8 @@ module Gitlab
files = query.filter_results(files) if query.filters.any?
files
rescue Gitlab::Git::Repository::NoRepository
[]
end
private

View File

@ -1119,6 +1119,8 @@ module Gitlab
gitaly_repository_client.search_files_by_regexp(ref, filter, limit: limit, offset: offset).map do |file|
Gitlab::EncodingHelper.encode_utf8(file)
end
rescue GRPC::NotFound
raise NoRepository
end
def find_commits_by_message(query, ref, path, limit, offset)

View File

@ -7,7 +7,7 @@ module Gitlab
DEFAULT_SIDEKIQ_LIMITS = {
main_db_duration_limit_per_worker: {
resource_key: 'db_main_duration_s',
resource_key: :db_main_duration_s,
metadata: {
db_config_name: 'main'
},
@ -28,7 +28,7 @@ module Gitlab
]
},
ci_db_duration_limit_per_worker: {
resource_key: 'db_ci_duration_s',
resource_key: :db_ci_duration_s,
metadata: {
db_config_name: 'ci'
},
@ -49,7 +49,7 @@ module Gitlab
]
},
sec_db_duration_limit_per_worker: {
resource_key: 'db_sec_duration_s',
resource_key: :db_sec_duration_s,
metadata: {
db_config_name: 'sec'
},
@ -72,7 +72,7 @@ module Gitlab
}.freeze
# name - <String> name of the limit to be used in ApplicationRateLimiter
# resource_key - <String> Key in SafeRequestStore which tracks a resource usage
# resource_key - <Symbol> Key in SafeRequestStore which tracks a resource usage
# scopes - <String> Key in ApplicationContext or the worker_name
# metadata - <Hash> Hash containing metadata for various usage, e.g. emitting extra logs/metrics
# or further logic checks before throttling.

View File

@ -2430,9 +2430,15 @@ msgstr ""
msgid "AICatalog|Agent"
msgstr ""
msgid "AICatalog|Agent deleted successfully."
msgstr ""
msgid "AICatalog|Agents"
msgstr ""
msgid "AICatalog|Are you sure you want to delete agent %{name}?"
msgstr ""
msgid "AICatalog|Briefly describe what this agent is designed to do and its key capabilities."
msgstr ""
@ -2451,6 +2457,9 @@ msgstr ""
msgid "AICatalog|Define the agent's personality, expertise, and behavioral guidelines. This shapes how the agent responds and approaches tasks."
msgstr ""
msgid "AICatalog|Delete agent"
msgstr ""
msgid "AICatalog|Description is required."
msgstr ""
@ -2460,6 +2469,9 @@ msgstr ""
msgid "AICatalog|Edit agent"
msgstr ""
msgid "AICatalog|Failed to delete agent. %{error}"
msgstr ""
msgid "AICatalog|Failed to run agent."
msgstr ""
@ -6108,6 +6120,9 @@ msgstr ""
msgid "AiPowered|Get started with GitLab Duo Core"
msgstr ""
msgid "AiPowered|GitLab Duo Agent Platform"
msgstr ""
msgid "AiPowered|GitLab Duo Agent Platform is now on for the instance and the service account (%{accountId}) was created. To use Agent Platform in your groups, you must turn on AI features for specific groups."
msgstr ""
@ -6129,9 +6144,6 @@ msgstr ""
msgid "AiPowered|GitLab Duo Self-Hosted"
msgstr ""
msgid "AiPowered|GitLab Duo Workflow"
msgstr ""
msgid "AiPowered|GitLab Duo Workflow has successfully been turned off."
msgstr ""
@ -7725,9 +7737,6 @@ msgstr ""
msgid "Analytics|Create dashboard %{dashboardSlug}"
msgstr ""
msgid "Analytics|Create with GitLab Duo (optional)"
msgstr ""
msgid "Analytics|Create your dashboard"
msgstr ""
@ -7802,9 +7811,6 @@ msgstr ""
msgid "Analytics|Enter a dashboard title"
msgstr ""
msgid "Analytics|Enter a prompt to continue."
msgstr ""
msgid "Analytics|Enter a visualization title"
msgstr ""
@ -7817,9 +7823,6 @@ msgstr ""
msgid "Analytics|Events received since %{date}"
msgstr ""
msgid "Analytics|Example: Number of users over time, grouped weekly"
msgstr ""
msgid "Analytics|Exclude anonymous users"
msgstr ""
@ -7835,12 +7838,6 @@ msgstr ""
msgid "Analytics|Failed to update project-level settings. Please try again."
msgstr ""
msgid "Analytics|Generate with Duo"
msgstr ""
msgid "Analytics|GitLab Duo may be used to help generate your visualization. You can prompt Duo with your desired data, as well as any dimensions or additional groupings of that data. You may also edit the result as needed."
msgstr ""
msgid "Analytics|Invalid dashboard configuration"
msgstr ""
@ -7919,9 +7916,6 @@ msgstr ""
msgid "Analytics|The visualization preview displays only the last 7 days. Dashboard visualizations can display the entire date range."
msgstr ""
msgid "Analytics|There was a problem generating your query. Please try again."
msgstr ""
msgid "Analytics|To create your own dashboards, first configure a project to store your dashboards."
msgstr ""
@ -7961,9 +7955,6 @@ msgstr ""
msgid "Analytics|Visualization was saved successfully"
msgstr ""
msgid "Analytics|Would you like to replace your existing selection with a new visualization generated through GitLab Duo?"
msgstr ""
msgid "Analyze your dependencies for known vulnerabilities."
msgstr ""
@ -18238,9 +18229,6 @@ msgstr ""
msgid "Continue to the next step"
msgstr ""
msgid "Continue with GitLab Duo"
msgstr ""
msgid "Continue with overages"
msgstr ""
@ -47707,9 +47695,6 @@ msgstr ""
msgid "ProductAnalytics|Back to dashboards"
msgstr ""
msgid "ProductAnalytics|By providing feedback on AI-generated content, you acknowledge that GitLab may review the prompts you submitted alongside this feedback."
msgstr ""
msgid "ProductAnalytics|Collector host"
msgstr ""
@ -47755,9 +47740,6 @@ msgstr ""
msgid "ProductAnalytics|Events"
msgstr ""
msgid "ProductAnalytics|Feedback acknowledgement"
msgstr ""
msgid "ProductAnalytics|For more information, see the %{linkStart}docs%{linkEnd}."
msgstr ""
@ -47782,12 +47764,6 @@ msgstr ""
msgid "ProductAnalytics|Help us improve Product Analytics Dashboards by sharing your experience."
msgstr ""
msgid "ProductAnalytics|Helpful"
msgstr ""
msgid "ProductAnalytics|How was the result?"
msgstr ""
msgid "ProductAnalytics|I agree to event collection and processing in this region."
msgstr ""
@ -47860,9 +47836,6 @@ msgstr ""
msgid "ProductAnalytics|Tell us what you think!"
msgstr ""
msgid "ProductAnalytics|Thank you for your feedback."
msgstr ""
msgid "ProductAnalytics|The Product Analytics Beta on GitLab.com is offered only in the Google Cloud Platform zone %{zone}."
msgstr ""
@ -47911,9 +47884,6 @@ msgstr ""
msgid "ProductAnalytics|Uncheck if you would like to configure a different provider for this project."
msgstr ""
msgid "ProductAnalytics|Unhelpful"
msgstr ""
msgid "ProductAnalytics|Usage by month"
msgstr ""
@ -47941,9 +47911,6 @@ msgstr ""
msgid "ProductAnalytics|Waiting for events"
msgstr ""
msgid "ProductAnalytics|Wrong"
msgstr ""
msgid "ProductAnalytics|You can instrument your application using a JS module or an HTML script. Follow the instructions below for the option you prefer."
msgstr ""

View File

@ -57,7 +57,6 @@ describe('Abuse Report Add Note', () => {
it('should show the comment form', () => {
expect(findAbuseReportCommentForm().exists()).toBe(true);
expect(findAbuseReportCommentForm().props()).toMatchObject({
abuseReportId: mockAbuseReportId,
isSubmitting: false,
autosaveKey: `${mockAbuseReportId}-comment`,
commentButtonText: 'Comment',

View File

@ -32,7 +32,6 @@ describe('Abuse Report Comment Form', () => {
const findCommentButton = () => wrapper.find('[data-testid="comment-button"]');
const createComponent = ({
abuseReportId = mockAbuseReportId,
isSubmitting = false,
initialValue = mockInitialValue,
autosaveKey = mockAutosaveKey,
@ -40,7 +39,6 @@ describe('Abuse Report Comment Form', () => {
} = {}) => {
wrapper = shallowMount(AbuseReportCommentForm, {
propsData: {
abuseReportId,
isSubmitting,
initialValue,
autosaveKey,

View File

@ -43,7 +43,6 @@ describe('Abuse Report Discussion', () => {
expect(findAbuseReportNote().exists()).toBe(true);
expect(findAbuseReportNote().props()).toMatchObject({
abuseReportId: mockAbuseReportId,
note: mockDiscussionWithNoReplies[0],
showReplyButton: true,
});

View File

@ -10,7 +10,6 @@ import AbuseReportEditNote from '~/admin/abuse_report/components/notes/abuse_rep
import AbuseReportCommentForm from '~/admin/abuse_report/components/notes/abuse_report_comment_form.vue';
import {
mockAbuseReport,
mockDiscussionWithNoReplies,
editAbuseReportNoteResponse,
editAbuseReportNoteResponseWithErrors,
@ -23,7 +22,6 @@ Vue.use(VueApollo);
describe('Abuse Report Edit Note', () => {
let wrapper;
const mockAbuseReportId = mockAbuseReport.report.globalId;
const mockNote = mockDiscussionWithNoReplies[0];
const mutationSuccessHandler = jest.fn().mockResolvedValue(editAbuseReportNoteResponse);
@ -33,17 +31,10 @@ describe('Abuse Report Edit Note', () => {
const findAbuseReportCommentForm = () => wrapper.findComponent(AbuseReportCommentForm);
const createComponent = ({
mutationHandler = mutationSuccessHandler,
abuseReportId = mockAbuseReportId,
discussionId = '',
note = mockNote,
} = {}) => {
const createComponent = ({ mutationHandler = mutationSuccessHandler, note = mockNote } = {}) => {
wrapper = shallowMountExtended(AbuseReportEditNote, {
apolloProvider: createMockApollo([[updateNoteMutation, mutationHandler]]),
propsData: {
abuseReportId,
discussionId,
note,
},
});
@ -57,7 +48,6 @@ describe('Abuse Report Edit Note', () => {
it('should show the comment form', () => {
expect(findAbuseReportCommentForm().exists()).toBe(true);
expect(findAbuseReportCommentForm().props()).toMatchObject({
abuseReportId: mockAbuseReportId,
isSubmitting: false,
autosaveKey: `${mockNote.id}-comment`,
commentButtonText: 'Save comment',

View File

@ -107,10 +107,7 @@ describe('Abuse Report Note', () => {
await findNoteActions().vm.$emit('startEditing');
expect(findEditNote().exists()).toBe(true);
expect(findEditNote().props()).toMatchObject({
abuseReportId: mockAbuseReportId,
note: mockNote,
});
expect(findEditNote().props('note')).toBe(mockNote);
expect(findNoteHeader().exists()).toBe(false);
expect(findNoteBody().exists()).toBe(false);

View File

@ -297,6 +297,13 @@ exports[`AutocompleteHelper for work items filters users using apollo cache for
]
`;
exports[`AutocompleteHelper for work items filters work items using apollo cache for command "/unlink" 1`] = `
[
31,
30,
]
`;
exports[`AutocompleteHelper returns expected results before and after updating data sources 1`] = `
[
"florida.schoen",

View File

@ -50,6 +50,32 @@ jest.mock('~/graphql_shared/issuable_client', () => ({
},
],
}),
linkedItems: jest.fn().mockReturnValue({
'gitlab-org/gitlab-test:1': [
{
iid: 31,
title: 'rdfhdfj',
id: null,
workItemType: {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/1',
name: 'Issue',
iconName: 'issue-type-issue',
},
},
{
iid: 30,
title: 'incident1',
id: null,
workItemType: {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/1',
name: 'Issue',
iconName: 'issue-type-issue',
},
},
],
}),
}));
describe('defaultSorter', () => {
@ -258,7 +284,17 @@ describe('AutocompleteHelper', () => {
});
autocompleteHelper.tiptapEditor = {
view: { dom: { closest: () => ({ dataset: { workItemId: 1 } }) } },
view: {
dom: {
closest: () => ({
dataset: {
workItemFullPath: 'gitlab-org/gitlab-test',
workItemId: 1,
workItemIid: 1,
},
}),
},
},
};
});
@ -272,6 +308,16 @@ describe('AutocompleteHelper', () => {
expect(results.map(({ username }) => username)).toMatchSnapshot();
});
it.each`
command
${'/unlink'}
`('filters work items using apollo cache for command "$command"', async ({ command }) => {
const dataSource = autocompleteHelper.getDataSource('issue', { command });
const results = await dataSource.search();
expect(results.map(({ iid }) => iid)).toMatchSnapshot();
});
});
it('filters items correctly for the second time, when the first command was different', async () => {

View File

@ -7,6 +7,7 @@ jest.mock('~/emoji');
jest.mock('~/content_editor/services/gl_api_markdown_deserializer');
jest.mock('~/graphql_shared/issuable_client', () => ({
currentAssignees: jest.fn().mockReturnValue({}),
linkedItems: jest.fn().mockReturnValue({}),
}));
describe('content_editor/services/create_content_editor', () => {

View File

@ -83,6 +83,24 @@ describe('sorterFor', () => {
]);
});
it('sorts status by category', () => {
const items = [
{ status: { category: 'triage' } },
{ status: { category: 'done' } },
{ status: { category: 'to_do' } },
{ status: { category: 'triage' } },
{ status: { category: 'in_progress' } },
];
expect(items.sort(sorterFor('status'))).toEqual([
{ status: { category: 'triage' } },
{ status: { category: 'triage' } },
{ status: { category: 'to_do' } },
{ status: { category: 'in_progress' } },
{ status: { category: 'done' } },
]);
});
it('handles null values: they are always pushed to the bottom regardless of the order', () => {
const items = [{ value: 'B' }, { value: null }, { value: 'A' }, { value: null }];
expect(items.sort(sorterFor('value'))).toEqual([
@ -100,43 +118,56 @@ describe('sorterFor', () => {
]);
});
it('sorts by custom type (__typename = Epic)', () => {
it.each`
__typename | field | sortField
${'Epic'} | ${'epic'} | ${'title'}
${'Label'} | ${'label'} | ${'title'}
${'Project'} | ${'project'} | ${'nameWithNamespace'}
${'UserCore'} | ${'author'} | ${'username'}
${'MergeRequestAuthor'} | ${'author'} | ${'username'}
${'MergeRequestReviewer'} | ${'reviewer'} | ${'username'}
${'MergeRequestAssignee'} | ${'assignee'} | ${'username'}
`('sorts by $sortField for $__typename', ({ __typename, field, sortField }) => {
const items = [
{ epic: { __typename: 'Epic', title: 'Epic C' } },
{ epic: { __typename: 'Epic', title: 'Epic A' } },
{ epic: { __typename: 'Epic', title: 'Epic B' } },
{ [field]: { __typename, [sortField]: 'foo' } },
{ [field]: { __typename, [sortField]: 'bar' } },
{ [field]: { __typename, [sortField]: 'baz' } },
];
expect(items.sort(sorterFor('epic'))).toEqual([
{ epic: { __typename: 'Epic', title: 'Epic A' } },
{ epic: { __typename: 'Epic', title: 'Epic B' } },
{ epic: { __typename: 'Epic', title: 'Epic C' } },
expect(items.sort(sorterFor(field))).toEqual([
{ [field]: { __typename, [sortField]: 'bar' } },
{ [field]: { __typename, [sortField]: 'baz' } },
{ [field]: { __typename, [sortField]: 'foo' } },
]);
expect(items.sort(sorterFor('epic', false))).toEqual([
{ epic: { __typename: 'Epic', title: 'Epic C' } },
{ epic: { __typename: 'Epic', title: 'Epic B' } },
{ epic: { __typename: 'Epic', title: 'Epic A' } },
expect(items.sort(sorterFor(field, false))).toEqual([
{ [field]: { __typename, [sortField]: 'foo' } },
{ [field]: { __typename, [sortField]: 'baz' } },
{ [field]: { __typename, [sortField]: 'bar' } },
]);
});
it('sorts by custom type (__typename = UserCore)', () => {
it.each`
fieldKey
${'milestone'}
${'iteration'}
`("sorts by $fieldKey's due date", ({ fieldKey }) => {
const items = [
{ author: { __typename: 'UserCore', username: 'jane' } },
{ author: { __typename: 'UserCore', username: 'jill' } },
{ author: { __typename: 'UserCore', username: 'adam' } },
{ [fieldKey]: { dueDate: '2023-06-01' } },
{ [fieldKey]: { dueDate: '2023-05-15' } },
{ [fieldKey]: { dueDate: '2023-06-15' } },
];
expect(items.sort(sorterFor('author'))).toEqual([
{ author: { __typename: 'UserCore', username: 'adam' } },
{ author: { __typename: 'UserCore', username: 'jane' } },
{ author: { __typename: 'UserCore', username: 'jill' } },
expect(items.sort(sorterFor(fieldKey))).toEqual([
{ [fieldKey]: { dueDate: '2023-05-15' } },
{ [fieldKey]: { dueDate: '2023-06-01' } },
{ [fieldKey]: { dueDate: '2023-06-15' } },
]);
expect(items.sort(sorterFor('author', false))).toEqual([
{ author: { __typename: 'UserCore', username: 'jill' } },
{ author: { __typename: 'UserCore', username: 'jane' } },
{ author: { __typename: 'UserCore', username: 'adam' } },
expect(items.sort(sorterFor(fieldKey, false))).toEqual([
{ [fieldKey]: { dueDate: '2023-06-15' } },
{ [fieldKey]: { dueDate: '2023-06-01' } },
{ [fieldKey]: { dueDate: '2023-05-15' } },
]);
});

View File

@ -146,7 +146,7 @@ describe('CRUD Component', () => {
});
describe('with persistCollapsedState=true', () => {
describe('when the localStorage key is false or undefined', () => {
describe('when the localStorage key is undefined', () => {
beforeEach(() => {
createComponent(
{
@ -170,6 +170,11 @@ describe('CRUD Component', () => {
expect(localStorage.setItem).toHaveBeenCalledWith('crud-collapse-test-anchor', true);
expect(findBody().exists()).toBe(false);
});
it('does not emit an event on mounted when no local storage key is set', () => {
expect(wrapper.emitted('collapsed')).toBeUndefined();
expect(wrapper.emitted('expanded')).toBeUndefined();
});
});
describe('when the localStorage key is true', () => {
@ -190,6 +195,11 @@ describe('CRUD Component', () => {
expect(findBody().exists()).toBe(false);
});
it('emits an event on mounted', () => {
expect(wrapper.emitted('collapsed').at(0)).toEqual([]);
expect(wrapper.emitted('expanded')).toBeUndefined();
});
it('toggles the collapsible area and sets the localStorage key to false', async () => {
findCollapseToggle().vm.$emit('click');
await nextTick();
@ -198,6 +208,26 @@ describe('CRUD Component', () => {
expect(findBody().text()).toBe('Body slot');
});
});
describe('when the localStorage key is false', () => {
beforeEach(() => {
localStorage.setItem('crud-collapse-test-anchor', 'false');
createComponent(
{
isCollapsible: true,
persistCollapsedState: true,
anchorId: 'test-anchor',
toggleText: 'Form action toggle',
},
{ default: '<p>Body slot</p>' },
);
});
it('emits an event on mounted', () => {
expect(wrapper.emitted('collapsed')).toBeUndefined();
expect(wrapper.emitted('expanded').at(0)).toEqual([]);
});
});
});
describe('isCollapsible', () => {

View File

@ -26,6 +26,7 @@ const createComponent = ({
preventRedirect = false,
fullPath = 'gitlab-org/issuable-project-path',
hiddenMetadataKeys = [],
provide = {},
} = {}) =>
shallowMount(IssuableItem, {
propsData: {
@ -50,6 +51,7 @@ const createComponent = ({
queries: { childItemLinkedItems: { loading: false } },
},
},
provide,
});
const MOCK_GITLAB_URL = TEST_HOST;
@ -953,8 +955,14 @@ describe('IssuableItem', () => {
});
it('renders draft status for draft merge requests', () => {
// Note: this is gated by the `showMergeRequestStatusDraft` feature flag currently
wrapper = createComponent({
issuable: mockDraftIssuable,
provide: {
glFeatures: {
showMergeRequestStatusDraft: true,
},
},
});
expect(findDraftStatusBadge().exists()).toBe(true);

View File

@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::ApplicationRateLimiter::IncrementResourceUsagePerAction, :freeze_time,
:clean_gitlab_redis_rate_limiting, feature_category: :shared do
let(:resource_key) { 'usage_duration_s' }
let(:resource_key) { :usage_duration_s }
let(:usage) { 100 }
let(:cache_key) { 'test' }
let(:expiry) { 60 }
@ -52,6 +52,18 @@ RSpec.describe Gitlab::ApplicationRateLimiter::IncrementResourceUsagePerAction,
expect { increment(expiry + 1) }.to change { ttl }.by(a_value > 0)
end
end
# Covers the zero-usage early return: increment must be a no-op and
# must not create a key in Redis.
context 'when usage value is 0' do
let(:usage) { 0 }
it 'does not store the value in Redis' do
expect(increment).to eq usage
Gitlab::Redis::RateLimiting.with do |r|
expect(r.get(cache_key)).to be_nil
end
end
end
end
describe '#read' do

View File

@ -302,8 +302,8 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
end
describe '.resource_usage_throttled?', :request_store do
let(:resource_key) { 'throttled_resource_duration' }
let(:resource_key_2) { 'another_throttled_resource_duration' }
let(:resource_key) { :throttled_resource_duration }
let(:resource_key_2) { :another_throttled_resource_duration }
let(:threshold) { 100 }
let(:interval) { 60 }

View File

@ -6,7 +6,7 @@ RSpec.describe Gitlab::FileFinder, feature_category: :global_search do
describe '#find' do
let_it_be(:project) { create(:project, :public, :repository) }
subject(:file_finder) { described_class.new(project, project.default_branch) }
subject(:file_finder) { described_class.new(project, project.default_branch_or_main) }
it_behaves_like 'file finder' do
let(:expected_file_by_path) { 'files/images/wm.svg' }
@ -99,5 +99,17 @@ RSpec.describe Gitlab::FileFinder, feature_category: :global_search do
file_finder.find(query)
end
end
# FileFinder rescues Gitlab::Git::Repository::NoRepository and degrades
# to an empty result set rather than raising to the caller.
context 'when there is an exception Gitlab::Git::Repository::NoRepository' do
let_it_be(:project) { create(:project, :public, :empty_repo) }
before do
allow(project.repository).to receive(:search_files_by_regexp).and_raise(Gitlab::Git::Repository::NoRepository)
end
it 'returns empty array' do
expect(file_finder.find('foo')).to be_empty
end
end
end
end

View File

@ -748,7 +748,7 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
end
end
context 'when sending an ivalid regexp' do
context 'when sending an invalid regexp' do
let(:filter) { '*.rb' }
it 'raises error' do
@ -765,6 +765,18 @@ RSpec.describe Gitlab::Git::Repository, feature_category: :source_code_managemen
expect(result).to eq([])
end
end
# search_files_by_regexp translates a Gitaly GRPC::NotFound into the
# repository-layer NoRepository error.
context 'when client raises an exception GRPC::NotFound' do
let(:filter) { 'files\/.*\/.*\.rb' }
before do
allow(mutable_repository.gitaly_repository_client).to receive(:search_files_by_regexp).and_raise(GRPC::NotFound)
end
it 'raises an exception NoRepository' do
expect { result }.to raise_error(Gitlab::Git::Repository::NoRepository)
end
end
end
describe '#search_files_by_name' do

View File

@ -465,6 +465,10 @@ RSpec.describe UserDetail, feature_category: :system_access do
expect(user_detail.errors.full_messages).to match_array(["Website url is not a valid URL"])
end
end
it { is_expected.to validate_length_of(:email_otp).is_equal_to(64).allow_nil }
it { is_expected.to validate_length_of(:email_otp_last_sent_to).is_at_most(511) }
end
describe '#save' do
@ -553,4 +557,26 @@ RSpec.describe UserDetail, feature_category: :system_access do
details.valid?
end
end
# email_otp stores a digest (never the plaintext OTP); this verifies the
# attribute accepts a hashed value round-trip.
describe '#email_otp' do
let(:user_detail) { build(:user_detail) }
let(:hashed_value) { Devise.token_generator.digest(User, generate(:email), '123456') }
it 'can be set to a hashed value' do
expect { user_detail.email_otp = hashed_value }
.to change { user_detail.email_otp }.to(hashed_value)
end
end
# Serialization must not leak the email_otp secret digest while still
# exposing the rest of the attributes.
describe '#as_json' do
let(:user_detail) { build(:user_detail, email_otp: Digest::SHA2.hexdigest('')) }
it 'includes attributes' do
expect(user_detail.as_json.keys).not_to be_empty
end
it 'does not include email_otp' do
expect(user_detail.as_json).not_to have_key('email_otp')
end
end
end

View File

@ -206,6 +206,15 @@ RSpec.describe User, feature_category: :user_profile do
it { is_expected.to delegate_method(:bot_namespace).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:bot_namespace=).to(:user_detail).with_arguments(:args).allow_nil }
it { is_expected.to delegate_method(:email_otp).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:email_otp=).to(:user_detail).with_arguments(:args).allow_nil }
it { is_expected.to delegate_method(:email_otp_required_after).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:email_otp_required_after=).to(:user_detail).with_arguments(:args).allow_nil }
it { is_expected.to delegate_method(:email_otp_last_sent_at).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:email_otp_last_sent_at=).to(:user_detail).with_arguments(:args).allow_nil }
it { is_expected.to delegate_method(:email_otp_last_sent_to).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:email_otp_last_sent_to=).to(:user_detail).with_arguments(:args).allow_nil }
end
describe 'associations' do