Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-04-25 18:09:47 +00:00
parent 52f444e8c6
commit 0d9bbc854d
47 changed files with 679 additions and 675 deletions

View File

@ -205,9 +205,10 @@ export default {
},
},
methods: {
onRefetch() {
this.apolloClient.resetStore();
async onRefetch() {
await this.apolloClient.clearStore();
this.$apollo.queries.items.refetch();
this.$emit('refetch');
},
onKeysetNext(endCursor) {
this.$emit('keyset-page-change', {

View File

@ -230,28 +230,7 @@ export default {
},
},
async created() {
if (!Object.keys(this.tabCountsQuery).length) {
return;
}
try {
const { data } = await this.$apollo.query({ query: this.tabCountsQuery });
this.tabCounts = this.tabs.reduce((accumulator, tab) => {
const { count } = get(data, tab.countsQueryPath);
return {
...accumulator,
[tab.value]: count,
};
}, {});
} catch (error) {
createAlert({
message: this.tabCountsQueryErrorMessage,
error,
captureError: true,
});
}
this.getTabCounts();
},
methods: {
numberToMetricPrefix,
@ -359,6 +338,9 @@ export default {
onOffsetPageChange(page) {
this.pushQuery({ ...this.$route.query, [QUERY_PARAM_PAGE]: page });
},
onRefetch() {
this.getTabCounts();
},
async userPreferencesUpdateMutate(sort) {
try {
await this.$apollo.mutate({
@ -374,6 +356,30 @@ export default {
Sentry.captureException(error);
}
},
async getTabCounts() {
if (!Object.keys(this.tabCountsQuery).length) {
return;
}
try {
const { data } = await this.$apollo.query({ query: this.tabCountsQuery });
this.tabCounts = this.tabs.reduce((accumulator, tab) => {
const { count } = get(data, tab.countsQueryPath);
return {
...accumulator,
[tab.value]: count,
};
}, {});
} catch (error) {
createAlert({
message: this.tabCountsQueryErrorMessage,
error,
captureError: true,
});
}
},
},
};
</script>
@ -409,6 +415,7 @@ export default {
:pagination-type="paginationType"
@keyset-page-change="onKeysetPageChange"
@offset-page-change="onOffsetPageChange"
@refetch="onRefetch"
/>
<template v-else>{{ tab.text }}</template>
</gl-tab>

View File

@ -4,6 +4,7 @@ import { produce } from 'immer';
import { createAlert } from '~/alert';
import { __, s__ } from '~/locale';
import getBranchesOptionsQuery from '../graphql/queries/get_branches_options.query.graphql';
import { BRANCH_ANY } from '../constants';
const BRANCH_PAGINATION_LIMIT = 10;
@ -85,7 +86,7 @@ export default {
return [
{
text: s__('PipelineCharts|All branches'),
value: '', // use '' to represent no value selected, as GlCollapsibleListbox does not accept null as a valid value
value: BRANCH_ANY,
},
...this.branchesOptions.map((branch) => ({
text: branch,
@ -152,7 +153,6 @@ export default {
:block="block"
:items="items"
:title="__('Switch branch')"
:toggle-text="branch"
:search-placeholder="s__('Branches|Filter by branch name')"
:infinite-scroll-loading="loading"
:infinite-scroll="infiniteScroll"

View File

@ -2,7 +2,8 @@
import { s__ } from '~/locale';
import { createAlert } from '~/alert';
import { getDateInPast } from '~/lib/utils/datetime_utility';
import { SOURCE_ANY, DATE_RANGE_7_DAYS, DATE_RANGES_AS_DAYS } from '../constants';
import { DATE_RANGES_AS_DAYS, DATE_RANGE_DEFAULT, BRANCH_ANY } from '../constants';
import { updateQueryHistory, paramsFromQuery } from '../url_utils';
import getPipelineAnalytics from '../graphql/queries/get_pipeline_analytics.query.graphql';
import DashboardHeader from './dashboard_header.vue';
@ -35,12 +36,15 @@ export default {
},
},
data() {
const defaultParams = {
source: null,
branch: this.defaultBranch,
dateRange: DATE_RANGE_DEFAULT,
};
return {
params: {
source: SOURCE_ANY,
dateRange: DATE_RANGE_7_DAYS,
branch: this.defaultBranch,
},
defaultParams,
params: paramsFromQuery(window.location.search, defaultParams),
pipelineAnalytics: {
aggregate: {
count: null,
@ -82,8 +86,8 @@ export default {
return {
fullPath: this.projectPath,
source: this.params.source === SOURCE_ANY ? null : this.params.source,
branch: this.params.branch || null,
source: this.params.source || null,
branch: (this.params.branch === BRANCH_ANY ? null : this.params.branch) || null,
fromTime: getDateInPast(today, DATE_RANGES_AS_DAYS[this.params.dateRange] || 7),
toTime: today,
};
@ -99,6 +103,22 @@ export default {
};
},
},
mounted() {
window.addEventListener('popstate', this.updateParamsFromQuery);
},
beforeDestroy() {
window.removeEventListener('popstate', this.updateParamsFromQuery);
},
methods: {
updateParamsFromQuery() {
this.params = paramsFromQuery(window.location.search, this.defaultParams);
},
onFiltersInput(params) {
this.params = params;
updateQueryHistory(this.params, this.defaultParams);
},
},
};
</script>
<template>
@ -107,10 +127,11 @@ export default {
{{ s__('PipelineCharts|Pipelines') }}
</dashboard-header>
<pipelines-dashboard-clickhouse-filters
v-model="params"
:value="params"
:default-branch="defaultBranch"
:project-path="projectPath"
:project-branch-count="projectBranchCount"
@input="onFiltersInput($event)"
/>
<div>
<statistics-list :loading="loading" :counts="formattedCounts" />

View File

@ -7,7 +7,6 @@ import {
DATE_RANGE_90_DAYS,
DATE_RANGE_180_DAYS,
DATE_RANGE_DEFAULT,
SOURCE_ANY,
SOURCE_PUSH,
SOURCE_SCHEDULE,
SOURCE_MERGE_REQUEST_EVENT,
@ -32,7 +31,7 @@ import {
import BranchCollapsibleListbox from './branch_collapsible_listbox.vue';
const sourcesItems = [
{ value: SOURCE_ANY, text: s__('PipelineSource|Any source') },
{ value: null, text: s__('PipelineSource|Any source') },
{ value: SOURCE_PUSH, text: s__('PipelineSource|Push') },
{ value: SOURCE_SCHEDULE, text: s__('PipelineSource|Schedule') },
{ value: SOURCE_MERGE_REQUEST_EVENT, text: s__('PipelineSource|Merge Request Event') },
@ -83,11 +82,7 @@ export default {
props: {
value: {
type: Object,
default: () => ({
source: SOURCE_ANY,
dateRange: DATE_RANGE_7_DAYS,
branch: null,
}),
default: null,
required: false,
},
defaultBranch: {
@ -106,25 +101,36 @@ export default {
},
},
data() {
const { source, branch, dateRange } = this.value;
const isValidSource = sourcesItems.map(({ value }) => value).includes(source);
const isValidDateRange = dateRangeItems.map(({ value }) => value).includes(dateRange);
return {
params: {
source: isValidSource ? source : SOURCE_ANY,
dateRange: isValidDateRange ? dateRange : DATE_RANGE_DEFAULT,
branch: branch || this.defaultBranch,
},
source: null,
branch: null,
dateRange: null,
};
},
watch: {
params: {
handler(params) {
this.$emit('input', params);
value: {
handler() {
const { source, branch, dateRange } = this.value || {};
const isValidSource = sourcesItems.map((s) => s.value).includes(source);
const isValidDateRange = dateRangeItems.map((d) => d.value).includes(dateRange);
this.source = isValidSource ? source : null;
this.branch = branch || null;
this.dateRange = isValidDateRange ? dateRange : DATE_RANGE_DEFAULT;
},
deep: true,
immediate: true,
},
},
methods: {
onSelect(param, value) {
this[param] = value;
this.$emit('input', {
source: this.source,
branch: this.branch,
dateRange: this.dateRange,
});
},
},
sourcesItems,
@ -140,19 +146,21 @@ export default {
>
<gl-collapsible-listbox
id="pipeline-source"
v-model="params.source"
:selected="source"
block
:items="$options.sourcesItems"
@select="onSelect('source', $event)"
/>
</gl-form-group>
<gl-form-group class="gl-min-w-full sm:gl-min-w-26" :label="__('Branch')" label-for="branch">
<branch-collapsible-listbox
id="branch"
v-model="params.branch"
:selected="branch"
block
:default-branch="defaultBranch"
:project-path="projectPath"
:project-branch-count="projectBranchCount"
@select="onSelect('branch', $event)"
/>
</gl-form-group>
<gl-form-group
@ -162,9 +170,10 @@ export default {
>
<gl-collapsible-listbox
id="date-range"
v-model="params.dateRange"
:selected="dateRange"
block
:items="$options.dateRangeItems"
@select="onSelect('dateRange', $event)"
/>
</gl-form-group>
</div>

View File

@ -10,6 +10,8 @@ export const ONE_WEEK_AGO_DAYS = 7;
export const ONE_MONTH_AGO_DAYS = 31;
export const ONE_YEAR_AGO_DAYS = 365;
export const BRANCH_ANY = '~any'; // ~ is not allowed as part of a branch name so it can be used as a special identifier, see https://git-scm.com/docs/git-check-ref-format#_description
export const DATE_RANGE_7_DAYS = '7d';
export const DATE_RANGE_30_DAYS = '30d';
export const DATE_RANGE_90_DAYS = '90d';
@ -34,7 +36,6 @@ export const SNOWPLOW_SCHEMA = 'iglu:com.gitlab/gitlab_service_ping/jsonschema/1
export const SNOWPLOW_DATA_SOURCE = 'redis_hll';
// CiPipelineSources values from GraphQL schema.
export const SOURCE_ANY = 'ANY'; // This is a special value, not part of CiPipelineSources.
export const SOURCE_PUSH = 'PUSH';
export const SOURCE_SCHEDULE = 'SCHEDULE';
export const SOURCE_MERGE_REQUEST_EVENT = 'MERGE_REQUEST_EVENT';

View File

@ -0,0 +1,38 @@
import { queryToObject, updateHistory, mergeUrlParams } from '~/lib/utils/url_utility';
const PARAM_KEY_SOURCE = 'source';
const PARAM_KEY_BRANCH = 'branch';
const PARAM_KEY_DATE_RANGE = 'time';
/**
* Returns an object that represents parameters in the URL
*
* @param {Object} params - URL query string, defaults to the current `window.location.search`
* @param {Object} params - Default values, so URL does not have to add redundant values
*/
export const paramsFromQuery = (searchString = window.location.search, defaultParams = {}) => {
const query = queryToObject(searchString);
return {
source: query[PARAM_KEY_SOURCE] || defaultParams.source,
branch: query[PARAM_KEY_BRANCH] || defaultParams.branch,
dateRange: query[PARAM_KEY_DATE_RANGE] || defaultParams.dateRange,
};
};
/**
* Updates the browser URL bar with some parameters
*
* @param {Object} params - Current params to represent in the URL
* @param {Object} params - Default values, so URL is not updated with redundant values
*/
export const updateQueryHistory = (params, defaultParams = {}) => {
const { source, branch, dateRange } = params;
const query = {
[PARAM_KEY_SOURCE]: source === defaultParams.source ? null : source,
[PARAM_KEY_BRANCH]: branch === defaultParams.branch ? null : branch,
[PARAM_KEY_DATE_RANGE]: dateRange === defaultParams.dateRange ? null : dateRange,
};
updateHistory({
url: mergeUrlParams(query, window.location.href, { sort: true }),
});
};

View File

@ -0,0 +1,37 @@
import ColorPicker from './color_picker.vue';
const propDefault = (prop) => {
const defaultValue = ColorPicker.props[prop].default;
return typeof defaultValue === 'function' ? defaultValue() : defaultValue;
};
const makeStory = ({ props } = {}) => {
const Story = (args, { argTypes }) => ({
components: { ColorPicker },
props: Object.keys(argTypes),
template: '<color-picker v-bind="$props" />',
});
Story.args = {
...Object.fromEntries(Object.keys(ColorPicker.props).map((prop) => [prop, propDefault(prop)])),
suggestedColors: {},
...props,
};
return Story;
};
export const Default = makeStory();
export const InvalidState = makeStory({
props: {
value: 'foo',
state: false,
},
});
export default {
component: ColorPicker,
title: 'vue_shared/components/color_picker',
};

View File

@ -15,9 +15,7 @@
import { GlFormGroup, GlFormInput, GlFormInputGroup, GlLink, GlTooltipDirective } from '@gitlab/ui';
import { uniqueId } from 'lodash';
import { __, s__ } from '~/locale';
const PREVIEW_COLOR_DEFAULT_CLASSES =
'gl-relative gl-w-7 gl-bg-subtle gl-rounded-tl-base gl-rounded-bl-base';
import { BORDER_COLOR_ERROR, BORDER_COLOR_DEFAULT } from './constants';
export default {
name: 'ColorPicker',
@ -68,18 +66,11 @@ export default {
? s__('ColorPicker|Enter any hex color or choose one of the suggested colors below.')
: s__('ColorPicker|Enter any hex color.');
},
previewColor() {
if (this.state) {
return { backgroundColor: this.value };
}
return {};
},
previewColorClasses() {
const borderStyle =
this.state === false ? 'gl-shadow-inner-1-red-500' : 'gl-shadow-inner-1-gray-400';
return `${PREVIEW_COLOR_DEFAULT_CLASSES} ${borderStyle}`;
previewStyle() {
return {
backgroundColor: this.state ? this.value : null,
borderColor: this.state === false ? BORDER_COLOR_ERROR : BORDER_COLOR_DEFAULT,
};
},
hasSuggestedColors() {
return Object.keys(this.suggestedColors).length;
@ -118,7 +109,11 @@ export default {
>
<!-- eslint-enable @gitlab/vue-require-i18n-attribute-strings -->
<template #prepend>
<div :class="previewColorClasses" :style="previewColor" data-testid="color-preview">
<div
class="gl-relative gl-w-7 gl-rounded-bl-base gl-rounded-tl-base gl-border-1 gl-border-solid gl-bg-subtle"
:style="previewStyle"
data-testid="color-preview"
>
<gl-form-input
:id="id"
type="color"

View File

@ -0,0 +1,2 @@
export const BORDER_COLOR_ERROR = 'var(--gl-control-border-color-error)';
export const BORDER_COLOR_DEFAULT = 'var(--gl-control-border-color-default)';

View File

@ -30,6 +30,7 @@ class ApplicationController < BaseActionController
include StrongPaginationParams
include Gitlab::HttpRouter::RuleContext
include Gitlab::HttpRouter::RuleMetrics
include ViteCSP
before_action :authenticate_user!, except: [:route_not_found]
before_action :set_current_organization

View File

@ -24,27 +24,6 @@ class BaseActionController < ActionController::Base
content_security_policy do |p|
next if p.directives.blank?
if helpers.vite_enabled?
# Normally all Vite requests are proxied via Vite Ruby's middleware (example:
# https://gdk.test:3000/vite-dev/@fs/path/to/your/gdk), unless the
# skipProxy parameter is used (https://vite-ruby.netlify.app/config/#skipproxy-experimental).
#
# However, HMR requests go directly to another host, and we need to allow that.
# We need both Websocket and HTTP URLs because Vite will attempt to ping
# the HTTP URL if the Websocket isn't available:
# https://github.com/vitejs/vite/blob/899d9b1d272b7057aafc6fa01570d40f288a473b/packages/vite/src/client/client.ts#L320-L327
hmr_ws_url = Gitlab::Utils.append_path(helpers.vite_hmr_websocket_url, 'vite-dev/')
hmr_http_url = Gitlab::Utils.append_path(helpers.vite_hmr_http_url, 'vite-dev/')
http_path = Gitlab::Utils.append_path(Gitlab.config.gitlab.url, 'vite-dev/')
connect_sources = p.directives['connect-src']
p.connect_src(*(Array.wrap(connect_sources) | [hmr_ws_url, hmr_http_url]))
worker_sources = p.directives['worker-src']
p.worker_src(*(Array.wrap(worker_sources) | [hmr_ws_url, hmr_http_url, http_path]))
end
next unless Gitlab::CurrentSettings.snowplow_enabled? && !Gitlab::CurrentSettings.snowplow_collector_hostname.blank?
default_connect_src = p.directives['connect-src'] || p.directives['default-src']

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
module ViteCSP
extend ActiveSupport::Concern
included do
content_security_policy_with_context do |p|
next unless helpers.vite_enabled?
next if p.directives.blank?
# We need both Websocket and HTTP URLs because Vite will attempt to ping
# the HTTP URL if the Websocket isn't available:
# https://github.com/vitejs/vite/blob/899d9b1d272b7057aafc6fa01570d40f288a473b/packages/vite/src/client/client.ts#L320-L327
hmr_ws_url = Gitlab::Utils.append_path(helpers.vite_hmr_ws_origin, 'vite-dev/')
http_path = Gitlab::Utils.append_path(helpers.vite_origin, 'vite-dev/')
# http_path is used for openInEditorHost feature
# https://devtools.vuejs.org/getting-started/open-in-editor#customize-request
p.connect_src(*(Array.wrap(p.directives['connect-src']) | [hmr_ws_url, http_path]))
p.worker_src(*(Array.wrap(p.directives['worker-src']) | [http_path]))
p.style_src(*(Array.wrap(p.directives['style-src']) | [http_path]))
p.font_src(*(Array.wrap(p.directives['font-src']) | [http_path]))
end
end
end

View File

@ -507,6 +507,9 @@ module MergeRequestsHelper
query: 'reviewRequestedMergeRequests',
variables: {
reviewStates: %w[APPROVED REQUESTED_CHANGES REVIEWED],
not: {
reviewStates: %w[UNREVIEWED REVIEW_STARTED UNAPPROVED]
},
perPage: 10
}
},
@ -516,7 +519,10 @@ module MergeRequestsHelper
helpContent: _(''),
query: is_author_or_assignee ? 'authorOrAssigneeMergeRequests' : 'assignedMergeRequests',
variables: {
reviewStates: %w[REQUESTED_CHANGES REVIEWED],
or: {
reviewerWildcard: 'NONE',
reviewStates: %w[REQUESTED_CHANGES REVIEWED]
},
perPage: 10
}
},
@ -528,6 +534,9 @@ module MergeRequestsHelper
query: is_author_or_assignee ? 'authorOrAssigneeMergeRequests' : 'assignedMergeRequests',
variables: {
reviewStates: %w[APPROVED UNAPPROVED UNREVIEWED REVIEW_STARTED],
not: {
reviewStates: %w[REVIEWED REQUESTED_CHANGES]
},
perPage: 10
}
}

View File

@ -8,12 +8,13 @@ module ViteHelper
Gitlab::Utils.to_boolean(ViteRuby.env['VITE_ENABLED'], default: false)
end
def vite_hmr_websocket_url
ViteRuby.env['VITE_HMR_WS_URL']
def vite_origin
ViteRuby.config.origin
end
def vite_hmr_http_url
ViteRuby.env['VITE_HMR_HTTP_URL']
def vite_hmr_ws_origin
protocol = ViteRuby.config.https ? 'wss' : 'ws'
"#{protocol}://#{ViteRuby.config.host_with_port}"
end
def vite_page_entrypoint_paths(custom_action_name = nil)
@ -33,12 +34,6 @@ module ViteHelper
def universal_stylesheet_link_tag(path, **options)
return stylesheet_link_tag(path, **options) unless vite_enabled?
if Rails.env.test? && config.asset_host
# Link directly to Vite server when running tests because for unit and integration tests, there
# won't be a Rails server to proxy these requests to the Vite server.
options[:host] = URI::HTTP.build(host: ViteRuby.config.host, port: ViteRuby.config.port).to_s
end
options[:extname] = false
stylesheet_link_tag(

View File

@ -572,11 +572,7 @@ class MergeRequest < ApplicationRecord
end
scope :without_hidden, -> {
if Feature.enabled?(:hide_merge_requests_from_banned_users)
where_not_exists(Users::BannedUser.where('merge_requests.author_id = banned_users.user_id'))
else
all
end
where_not_exists(Users::BannedUser.where('merge_requests.author_id = banned_users.user_id'))
}
scope :merged_without_state_event_source, -> {
@ -2451,7 +2447,7 @@ class MergeRequest < ApplicationRecord
end
def hidden?
Feature.enabled?(:hide_merge_requests_from_banned_users) && author&.banned?
author&.banned?
end
def diffs_batch_cache_with_max_age?

View File

@ -1,8 +0,0 @@
---
name: hide_merge_requests_from_banned_users
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107836
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/386726
milestone: "15.8"
type: development
group: group::authorization
default_enabled: false

View File

@ -14,7 +14,7 @@
"ee/images/*",
"jh/images/*"
],
"port": 3038,
"skipProxy": true,
"publicOutputDir": "vite-dev",
"devServerConnectTimeout": 3
}

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class CleanupBackfillMissingNamespaceIdOnNotes < Gitlab::Database::Migration[2.2]
milestone '18.0'
MIGRATION = 'BackfillMissingNamespaceIdOnNotes'
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
# rubocop:disable Migration/BatchMigrationsPostOnly -- Delete in a migration rather than post_migration
# to delete the batched migration before it might be enqueued
delete_batched_background_migration(MIGRATION, :notes, :id, [])
# rubocop:enable Migration/BatchMigrationsPostOnly
end
def down; end
end

View File

@ -17,19 +17,11 @@ class QueueBackfillMissingNamespaceIdOnNotes < Gitlab::Database::Migration[2.2]
GITLAB_OPTIMIZED_BATCH_SIZE = 75_000
GITLAB_OPTIMIZED_SUB_BATCH_SIZE = 250
def up
queue_batched_background_migration(
MIGRATION,
:notes,
:id,
job_interval: DELAY_INTERVAL,
**batch_sizes
)
end
# No longer needed as we are now going to backfill only non project notes
# gitlab.com/gitlab-org/gitlab/-/issues/444222
def up; end
def down
delete_batched_background_migration(MIGRATION, :notes, :id, [])
end
def down; end
private

View File

@ -0,0 +1 @@
217682c74afb328af59dd6cf7c5283ed485b9ba7868d34881e1c096aaee5398f

View File

@ -307,6 +307,7 @@ Users can also be reactivated using the [GitLab API](../api/user_moderation.md#r
- Hiding merge requests of banned users [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107836) in GitLab 15.8 [with a flag](feature_flags.md) named `hide_merge_requests_from_banned_users`. Disabled by default.
- Hiding comments of banned users [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/112973) in GitLab 15.11 [with a flag](feature_flags.md) named `hidden_notes`. Disabled by default.
- Hiding projects of banned users [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121488) in GitLab 16.2 [with a flag](feature_flags.md) named `hide_projects_of_banned_users`. Disabled by default.
- Hiding merge requests of banned users [generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188770) in GitLab 18.0. Feature flag `hide_merge_requests_from_banned_users` removed.
{{< /history >}}

View File

@ -799,9 +799,11 @@ scoped to a group or project return no results.
## Advanced search migrations
With reindex migrations running in the background, there's no need for a manual
intervention. This usually happens in situations where new features are added to
advanced search, which means adding or changing the way content is indexed.
Reindex migrations run in the background, which means
you do not have to reindex the instance manually.
With the `elastic_migration_worker_enabled` application setting,
you can turn on or off the migration worker.
By default, the migration worker is on.
### Migration dictionary files

View File

@ -251,7 +251,7 @@ On GitLab Self-Managed, by default this feature is not available. To make it ava
{{< /alert >}}
You can enforce 2FA for [Git over SSH operations](../development/gitlab_shell/features.md#git-operations). However, you should use
[ED25519_SK](../user/ssh.md#ed25519_sk-ssh-keys) or [ECDSA_SK](../user/ssh.md#ecdsa_sk-ssh-keys) SSH keys instead. 2FA is enforced for Git operations only, and internal commands such as [`personal_access_token`](../development/gitlab_shell/features.md#personal-access-token) are excluded.
[ED25519_SK](../user/ssh.md#ed25519_sk-ssh-keys) or [ECDSA_SK](../user/ssh.md#ecdsa_sk-ssh-keys) SSH keys instead. 2FA is enforced for Git operations only, and internal commands from GitLab Shell such as `personal_access_token` are excluded.
To perform one-time password (OTP) verification, run:

View File

@ -1,93 +0,0 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillMissingNamespaceIdOnNotes < BatchedMigrationJob
operation_name :backfill_missing_namespace_id_on_notes
feature_category :code_review_workflow
def perform
each_sub_batch do |sub_batch|
Gitlab::Database.allow_cross_joins_across_databases(
url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163687'
) do
connection.execute(build_query(sub_batch))
end
end
end
private
# rubocop:disable Layout/LineLength -- SQL!
# rubocop:disable Metrics/MethodLength -- I do what I want
def build_query(scope)
records_query = scope.where(namespace_id: nil).select("
id,
(
coalesce(
(case
when exists (select 1 from projects where id = notes.project_id) then (select namespace_id from projects where id = notes.project_id)
when noteable_type = 'AlertManagement::Alert' then (select namespace_id from projects where id = (select project_id from alert_management_alerts where noteable_id = notes.id limit 1) limit 1)
when noteable_type = 'MergeRequest' then (select namespace_id from projects where id = (select project_id from merge_requests where noteable_id = notes.id limit 1) limit 1)
when noteable_type = 'Vulnerability' then (select namespace_id from projects where id = (select project_id from vulnerabilities where noteable_id = notes.id limit 1) limit 1)
-- These 2 need to pull namespace_id from the noteable
when noteable_type = 'DesignManagement::Design' then (select namespace_id from design_management_designs where id = notes.noteable_id limit 1)
when noteable_type = 'Issue' then (select namespace_id from issues where id = notes.noteable_id limit 1)
-- Epics pull in group_id
when noteable_type = 'Epic' then (select group_id from epics where id = notes.noteable_id limit 1)
-- Snippets pull from author
when noteable_type = 'Snippet' then (select id from namespaces where owner_id = (select author_id from notes where id = notes.id limit 1) limit 1)
-- Commits pull namespace_id from the project of the note
when noteable_type = 'Commit' then (select namespace_id from projects where id = notes.project_id limit 1)
else
-1
end
), -1)) as namespace_id_to_set
")
<<~SQL
with records AS (
#{records_query.to_sql}
), updated_rows as (
-- updating records with the located namespace_id_to_set value
update notes set namespace_id = namespace_id_to_set from records where records.id=notes.id and namespace_id_to_set <> -1
), deleted_rows as (
-- deleting the records where we couldn't find the namespace id
delete from notes where id IN (select id from records where namespace_id_to_set = -1)
)
select 1
SQL
end
# rubocop:enable Layout/LineLength
# rubocop:enable Metrics/MethodLength
def backfillable?(note)
note.noteable_type.present?
end
def extract_namespace_id(note)
# Attempt to find namespace_id from the project first.
#
if note.project_id
project = Project.find_by_id(note.project_id)
return project.namespace_id if project
end
# We have to load the noteable here because we don't have access to the
# usual ActiveRecord relationships to do it for us.
#
noteable = note.noteable_type.constantize.find(note.noteable_id)
case note.noteable_type
when "AlertManagement::Alert", "Commit", "MergeRequest", "Vulnerability"
noteable.project.namespace_id
when "DesignManagement::Design", "Epic", "Issue"
noteable.namespace_id
when "Snippet"
noteable.author.namespace_id
end
end
end
end
end

View File

@ -15,23 +15,13 @@ module ViteGdk
return unless enabled
# From https://vitejs.dev/config/server-options
host = config['host'] || 'localhost'
port = Integer(config['port'] || 3808)
hmr_config = config['hmr'] || {}
hmr_host = hmr_config['host'] || host
hmr_port = hmr_config['clientPort'] || hmr_config['port'] || port
hmr_ws_protocol = hmr_config['protocol'] || 'ws'
hmr_http_protocol = hmr_ws_protocol == 'wss' ? 'https' : 'http'
ViteRuby.env['VITE_HMR_HOST'] = hmr_host
# If the Websocket connection to the HMR host is not up, Vite will attempt to
# ping the HMR host via HTTP or HTTPS:
# https://github.com/vitejs/vite/blob/899d9b1d272b7057aafc6fa01570d40f288a473b/packages/vite/src/client/client.ts#L320-L327
ViteRuby.env['VITE_HMR_HTTP_URL'] = "#{hmr_http_protocol}://#{hmr_host}:#{hmr_port}"
ViteRuby.env['VITE_HMR_WS_URL'] = "#{hmr_ws_protocol}://#{hmr_host}:#{hmr_port}"
host = config['public_host'] || 'localhost'
ViteRuby.env['VITE_HMR_HOST'] = host
ViteRuby.configure(
host: host,
port: port
port: Integer(config['port'] || 3808),
https: config.fetch('https', { 'enabled' => false })['enabled']
)
end

View File

@ -5723,6 +5723,9 @@ msgstr ""
msgid "AiPowered|Connection method"
msgstr ""
msgid "AiPowered|Contact sales for Duo Enterprise"
msgstr ""
msgid "AiPowered|Direct connections"
msgstr ""
@ -5771,6 +5774,9 @@ msgstr ""
msgid "AiPowered|GitLab Duo Core available to all users"
msgstr ""
msgid "AiPowered|GitLab Duo Pro or Enterprise"
msgstr ""
msgid "AiPowered|GitLab Duo Self-Hosted"
msgstr ""
@ -5810,6 +5816,9 @@ msgstr ""
msgid "AiPowered|Participate in the Early Access Program and help make GitLab better"
msgstr ""
msgid "AiPowered|Purchase Duo Pro seats"
msgstr ""
msgid "AiPowered|Seat assignment for GitLab Duo has moved"
msgstr ""
@ -5825,9 +5834,18 @@ msgstr ""
msgid "AiPowered|Start date: %{startDate}"
msgstr ""
msgid "AiPowered|Tanuki AI icon"
msgstr ""
msgid "AiPowered|Turn on experiment and beta GitLab Duo features"
msgstr ""
msgid "AiPowered|Unlock advanced AI-powered capabilities with the Premium or Ultimate tier designed for your development needs."
msgstr ""
msgid "AiPowered|Upgrade to"
msgstr ""
msgid "AiPowered|Use beta models and features in GitLab Duo Self-Hosted"
msgstr ""
@ -30823,6 +30841,9 @@ msgstr ""
msgid "How do I use a web terminal?"
msgstr ""
msgid "How does it work?"
msgstr ""
msgid "How does pull mirroring work?"
msgstr ""
@ -35534,12 +35555,6 @@ msgstr ""
msgid "Learn more about GitLab"
msgstr ""
msgid "Learn more about Root Cause Analysis"
msgstr ""
msgid "Learn more about Root Cause Analysis in new tab"
msgstr ""
msgid "Learn more about Service Desk"
msgstr ""
@ -49591,9 +49606,6 @@ msgstr ""
msgid "Quick start guide"
msgstr ""
msgid "Quickly identify the root cause of an incident using AI-assisted analysis."
msgstr ""
msgid "README"
msgstr ""
@ -51347,9 +51359,6 @@ msgstr ""
msgid "Rollback"
msgstr ""
msgid "Root Cause Analysis"
msgstr ""
msgid "Root Moved Permanently redirection response"
msgstr ""
@ -61163,6 +61172,9 @@ msgstr ""
msgid "There are no closed merge requests"
msgstr ""
msgid "There are no comments to summarize."
msgstr ""
msgid "There are no commits yet"
msgstr ""
@ -62173,9 +62185,6 @@ msgstr ""
msgid "This repository was last checked %{last_check_timestamp}. The check passed."
msgstr ""
msgid "This resource has no comments to summarize"
msgstr ""
msgid "This setting can be overridden in each project."
msgstr ""
@ -63934,6 +63943,9 @@ msgstr ""
msgid "Troubleshoot failed CI/CD jobs with Root Cause Analysis."
msgstr ""
msgid "Troubleshoot failed jobs with Root Cause Analysis"
msgstr ""
msgid "True"
msgstr ""
@ -65163,6 +65175,9 @@ msgstr ""
msgid "Use .gitlab-ci.yml"
msgstr ""
msgid "Use AI to quickly identify the cause of job failures and get example fixes to get your pipeline running."
msgstr ""
msgid "Use Amazon Q to streamline development workflow and project upgrades"
msgstr ""

View File

@ -9,7 +9,7 @@ RSpec.describe 'Subscriptions Content Security Policy', feature_category: :integ
let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/subscriptions', 'GET', 'https://gitlab.test') }
let(:jwt) { Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret) }
subject { response_headers['Content-Security-Policy'] }
subject(:csp) { parse_csp response_headers['Content-Security-Policy'] }
context 'when there is no global config' do
before do
@ -36,9 +36,19 @@ RSpec.describe 'Subscriptions Content Security Policy', feature_category: :integ
it 'appends to CSP directives' do
visit jira_connect_subscriptions_path(jwt: jwt)
is_expected.to include("frame-ancestors 'self' https://*.atlassian.net https://*.jira.com")
is_expected.to include("script-src 'self' https://some-cdn.test https://connect-cdn.atl-paas.net")
is_expected.to include("style-src 'self' https://some-cdn.test 'unsafe-inline'")
frame_ancestors = "'self' https://*.atlassian.net https://*.jira.com".split(' ')
script_src = "'self' https://some-cdn.test https://connect-cdn.atl-paas.net".split(' ')
style_src = "'self' https://some-cdn.test 'unsafe-inline'".split(' ')
expect(csp['frame-ancestors']).to include(*frame_ancestors)
expect(csp['script-src']).to include(*script_src)
expect(csp['style-src']).to include(*style_src)
end
end
def parse_csp(csp)
csp.split(';').reject { |dir| dir.strip.empty? }.each_with_object({}) do |dir, hash|
parts = dir.strip.split(/\s+/)
hash[parts.first] = parts[1..]
end
end
end

View File

@ -1368,13 +1368,5 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
it { is_expected.to include(banned_merge_request) }
end
context 'when the `hide_merge_requests_from_banned_users` feature flag is disabled' do
before do
stub_feature_flags(hide_merge_requests_from_banned_users: false)
end
it { is_expected.to include(banned_merge_request) }
end
end
end

View File

@ -82,7 +82,7 @@ describe('TabView', () => {
});
apolloClient = mockApollo.defaultClient;
jest.spyOn(apolloClient, 'resetStore');
jest.spyOn(apolloClient, 'clearStore');
};
const findProjectsList = () => wrapper.findComponent(ProjectsList);
@ -154,10 +154,16 @@ describe('TabView', () => {
findProjectsList().vm.$emit('refetch');
});
it('resets store and refetches list', () => {
expect(apolloClient.resetStore).toHaveBeenCalled();
it('clears store and refetches list', async () => {
expect(apolloClient.clearStore).toHaveBeenCalled();
await waitForPromises();
expect(handler[1]).toHaveBeenCalledTimes(2);
});
it('emits refetch event', async () => {
await waitForPromises();
expect(wrapper.emitted('refetch')).toEqual([[]]);
});
});
});

View File

@ -727,4 +727,18 @@ describe('TabsWithList', () => {
expect(findTabView().props('timestampType')).toBe(expectedTimestampType);
});
});
describe('when refetch event is fired', () => {
beforeEach(async () => {
await createComponent();
await waitForPromises();
await mockApollo.defaultClient.clearStore();
findTabView().vm.$emit('refetch');
await waitForPromises();
});
it('refetches tab counts', () => {
expect(successHandler).toHaveBeenCalledTimes(2);
});
});
});

View File

@ -6,6 +6,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BranchCollapsibleListbox from '~/projects/pipelines/charts/components/branch_collapsible_listbox.vue';
import getBranchesOptionsQuery from '~/projects/pipelines/charts/graphql/queries/get_branches_options.query.graphql';
import { BRANCH_ANY } from '~/projects/pipelines/charts/constants';
import { createAlert } from '~/alert';
jest.mock('~/alert');
@ -63,7 +64,7 @@ describe('Pipeline editor branch switcher', () => {
infiniteScroll: false,
loading: false,
infiniteScrollLoading: true,
items: [{ text: 'All branches', value: '' }],
items: [{ text: 'All branches', value: BRANCH_ANY }],
searchPlaceholder: 'Filter by branch name',
searchable: true,
searching: false,
@ -96,7 +97,7 @@ describe('Pipeline editor branch switcher', () => {
expect(findGlCollapsibleListbox().exists()).toBe(true);
expect(findGlCollapsibleListbox().props('items')).toEqual([
{ text: 'All branches', value: '' },
{ text: 'All branches', value: BRANCH_ANY },
{ text: 'main', value: 'main' },
{ text: 'feature-branch', value: 'feature-branch' },
]);
@ -155,7 +156,7 @@ describe('Pipeline editor branch switcher', () => {
it('updates items', () => {
expect(findGlCollapsibleListbox().props('items')).toEqual([
{ text: 'All branches', value: '' },
{ text: 'All branches', value: BRANCH_ANY },
{ text: 'main', value: 'main' },
{ text: 'feature-branch', value: 'feature-branch' },
{ text: 'feature-branch-2', value: 'feature-branch-2' },
@ -193,7 +194,6 @@ describe('Pipeline editor branch switcher', () => {
it('shows selected branch', () => {
expect(findGlCollapsibleListbox().props('selected')).toBe('feature-branch');
expect(findGlCollapsibleListbox().props('toggleText')).toBe('feature-branch');
expect(findListboxItem(2).text()).toBe('feature-branch');
expect(findListboxItem(2).props('isSelected')).toBe(true);
@ -211,7 +211,7 @@ describe('Pipeline editor branch switcher', () => {
it('shows "All" option', () => {
expect(findGlCollapsibleListbox().props('items')).toEqual([
{ text: 'All branches', value: '' },
{ text: 'All branches', value: BRANCH_ANY },
]);
});

View File

@ -29,8 +29,28 @@ describe('PipelinesDashboardClickhouseFilters', () => {
};
describe('input', () => {
it('does not emit immediately', () => {
createComponent();
beforeEach(() => {
createComponent({
props: { value: { source: 'PUSH', dateRange: '30d', branch: 'my-branch-0' } },
});
});
it('sets values, and does not emit @input', () => {
expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('PUSH');
expect(findBranchCollapsibleListbox().props('selected')).toBe('my-branch-0');
expect(findCollapsibleListbox('date-range').props('selected')).toBe('30d');
expect(wrapper.emitted('input')).toBeUndefined();
});
it('reacts to changes in value, and does not emit @input', async () => {
wrapper.setProps({ value: { source: 'SCHEDULE', dateRange: '180d', branch: 'my-branch-1' } });
await nextTick();
expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('SCHEDULE');
expect(findBranchCollapsibleListbox().props('selected')).toBe('my-branch-1');
expect(findCollapsibleListbox('date-range').props('selected')).toBe('180d');
expect(wrapper.emitted('input')).toBeUndefined();
});
});
@ -70,7 +90,7 @@ describe('PipelinesDashboardClickhouseFilters', () => {
});
it('is "Any" by default', () => {
expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('ANY');
expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe(null);
});
it('sets selected value', () => {
@ -94,7 +114,7 @@ describe('PipelinesDashboardClickhouseFilters', () => {
},
});
expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('ANY');
expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe(null);
});
it('emits when an option is selected', async () => {
@ -103,7 +123,7 @@ describe('PipelinesDashboardClickhouseFilters', () => {
await nextTick();
expect(wrapper.emitted('input')[0][0]).toEqual({
branch: defaultBranch,
branch: null,
dateRange: '7d',
source: 'PUSH',
});
@ -117,15 +137,15 @@ describe('PipelinesDashboardClickhouseFilters', () => {
it('shows listbox with default branch as default value', () => {
expect(findBranchCollapsibleListbox().props()).toMatchObject({
selected: defaultBranch,
selected: null,
defaultBranch,
projectPath,
projectBranchCount,
});
});
it('is the default branch by default', () => {
expect(findBranchCollapsibleListbox().props('selected')).toBe(defaultBranch);
it('is no branch by default', () => {
expect(findBranchCollapsibleListbox().props('selected')).toBe(null);
});
it('sets selected value', () => {
@ -148,7 +168,7 @@ describe('PipelinesDashboardClickhouseFilters', () => {
expect(wrapper.emitted('input')[0][0]).toEqual({
branch: 'my-branch-1',
dateRange: '7d',
source: 'ANY',
source: null,
});
});
});
@ -189,8 +209,8 @@ describe('PipelinesDashboardClickhouseFilters', () => {
expect(wrapper.emitted('input')[0][0]).toEqual({
dateRange: '90d',
branch: defaultBranch,
source: 'ANY',
branch: null,
source: null,
});
});
});

View File

@ -7,10 +7,11 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelinesDashboardClickhouse from '~/projects/pipelines/charts/components/pipelines_dashboard_clickhouse.vue';
import {
SOURCE_ANY,
SOURCE_PUSH,
DATE_RANGE_7_DAYS,
BRANCH_ANY,
DATE_RANGE_DEFAULT,
DATE_RANGE_30_DAYS,
DATE_RANGE_180_DAYS,
} from '~/projects/pipelines/charts/constants';
import PipelinesDashboardClickhouseFilters from '~/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_filters.vue';
import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
@ -18,12 +19,19 @@ import PipelineDurationChart from '~/projects/pipelines/charts/components/pipeli
import PipelineStatusChart from '~/projects/pipelines/charts/components/pipeline_status_chart.vue';
import getPipelineAnalyticsQuery from '~/projects/pipelines/charts/graphql/queries/get_pipeline_analytics.query.graphql';
import { createAlert } from '~/alert';
import { updateHistory } from '~/lib/utils/url_utility';
import { useFakeDate } from 'helpers/fake_date';
import { pipelineAnalyticsEmptyData, pipelineAnalyticsData } from 'jest/analytics/ci_cd/mock_data';
import setWindowLocation from 'helpers/set_window_location_helper';
Vue.use(VueApollo);
jest.mock('~/alert');
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
updateHistory: jest.fn(),
}));
const projectPath = 'gitlab-org/gitlab';
const defaultBranch = 'main';
const projectBranchCount = 99;
@ -73,55 +81,139 @@ describe('PipelinesDashboardClickhouse', () => {
});
describe('filters', () => {
beforeEach(() => {
createComponent();
});
describe('default filters', () => {
beforeEach(() => {
createComponent();
});
it('sets default filters', () => {
expect(findPipelinesDashboardClickhouseFilters().props()).toEqual({
defaultBranch: 'main',
projectBranchCount: 99,
projectPath: 'gitlab-org/gitlab',
value: {
source: SOURCE_ANY,
it('sets default filters', () => {
expect(findPipelinesDashboardClickhouseFilters().props()).toEqual({
defaultBranch,
projectBranchCount: 99,
projectPath: 'gitlab-org/gitlab',
value: {
source: null,
branch: defaultBranch,
dateRange: DATE_RANGE_DEFAULT,
},
});
});
it('requests with default filters', async () => {
await waitForPromises();
expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(1);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
fullPath: projectPath,
source: null,
branch: defaultBranch,
dateRange: DATE_RANGE_7_DAYS,
fromTime: new Date('2022-02-08'),
toTime: new Date('2022-02-15'),
});
});
});
describe('filters can be bookmarked', () => {
const tests = [
{
name: 'only default branch',
input: {
source: null,
dateRange: DATE_RANGE_DEFAULT,
branch: defaultBranch,
},
variables: {
source: null,
fullPath: projectPath,
branch: defaultBranch,
fromTime: new Date('2022-02-08'),
toTime: new Date('2022-02-15'),
},
query: '',
},
});
{
name: 'the last 30 days',
input: {
source: null,
dateRange: DATE_RANGE_30_DAYS,
branch: BRANCH_ANY,
},
variables: {
source: null,
fullPath: projectPath,
branch: null,
fromTime: new Date('2022-01-16'),
toTime: new Date('2022-02-15'),
},
query: '?branch=~any&time=30d',
},
{
name: 'feature branch pushes in the last 180 days',
input: {
source: SOURCE_PUSH,
dateRange: DATE_RANGE_180_DAYS,
branch: 'feature-branch',
},
variables: {
source: SOURCE_PUSH,
fullPath: projectPath,
branch: 'feature-branch',
fromTime: new Date('2021-08-19'),
toTime: new Date('2022-02-15'),
},
query: '?branch=feature-branch&source=PUSH&time=180d',
},
];
it.each(tests)(
'filters by "$name", updating query to "$query"',
async ({ input, variables, query }) => {
createComponent();
findPipelinesDashboardClickhouseFilters().vm.$emit('input', input);
await waitForPromises();
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith(variables);
expect(updateHistory).toHaveBeenLastCalledWith({ url: `http://test.host/${query}` });
},
);
it.each(tests)(
'with query "$query", filters by "$name"',
async ({ input, variables, query }) => {
setWindowLocation(query);
createComponent();
await waitForPromises();
expect(findPipelinesDashboardClickhouseFilters().props('value')).toEqual(input);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith(variables);
},
);
it.each(tests)(
'responds to history back button for "$query" to filter by "$name"',
async ({ input, variables, query }) => {
createComponent();
setWindowLocation(query);
window.dispatchEvent(new Event('popstate'));
await waitForPromises();
expect(findPipelinesDashboardClickhouseFilters().props('value')).toEqual(input);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith(variables);
},
);
});
it('requests with default filters', async () => {
await waitForPromises();
it('removes popstate event listener when destroyed', () => {
const spy = jest.spyOn(window, 'removeEventListener');
expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(1);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
source: null,
fullPath: projectPath,
branch: defaultBranch,
fromTime: new Date('2022-02-08'),
toTime: new Date('2022-02-15'),
});
});
createComponent();
wrapper.destroy();
it('when an option is selected, requests with new filters', async () => {
await waitForPromises();
findPipelinesDashboardClickhouseFilters().vm.$emit('input', {
source: SOURCE_PUSH,
dateRange: DATE_RANGE_30_DAYS,
branch: 'feature-branch',
});
await waitForPromises();
expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(2);
expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
source: SOURCE_PUSH,
fullPath: projectPath,
branch: 'feature-branch',
fromTime: new Date('2022-01-16'),
toTime: new Date('2022-02-15'),
});
expect(spy).toHaveBeenCalledWith('popstate', wrapper.vm.updateParamsFromQuery);
});
});

View File

@ -0,0 +1,41 @@
import { updateQueryHistory, paramsFromQuery } from '~/projects/pipelines/charts/url_utils';
import { updateHistory } from '~/lib/utils/url_utility';
// Replace `updateHistory` with a spy so the specs can assert on the exact URL
// written to browser history, while keeping the rest of the module's real
// behavior via `jest.requireActual`.
jest.mock('~/lib/utils/url_utility', () => ({
  ...jest.requireActual('~/lib/utils/url_utility'),
  updateHistory: jest.fn(),
}));

// Baseline filter values: params equal to these defaults are expected to be
// omitted from the generated query string (hence `input: defaults` → '').
const defaults = {
  source: null,
  branch: 'main',
  dateRange: '7d',
};

describe('dashboard utils', () => {
  // Each example pairs a partial params object (merged over `defaults`) with
  // the query string it should serialize to — and, symmetrically, the params
  // that query string should parse back into. Shared by both describe blocks
  // below so serialization and parsing stay round-trip consistent.
  const examples = [
    { input: {}, query: '' },
    { input: defaults, query: '' },
    { input: { source: 'PUSH' }, query: '?source=PUSH' },
    { input: { branch: 'feature-branch' }, query: '?branch=feature-branch' },
    { input: { dateRange: '180d' }, query: '?time=180d' },
    {
      input: { dateRange: '180d', branch: 'feature-branch', source: 'PUSH' },
      query: '?branch=feature-branch&source=PUSH&time=180d',
    },
  ];

  describe('updateQueryHistory', () => {
    it.each(examples)('updates history to "http://test.host/$query"', ({ input, query }) => {
      updateQueryHistory(input, defaults);

      expect(updateHistory).toHaveBeenLastCalledWith({ url: `http://test.host/${query}` });
    });
  });

  describe('paramsFromQuery', () => {
    // NOTE(review): this spec only parses the query back into params; the `it`
    // name ("updates history…") looks copy-pasted from the block above —
    // consider renaming it to e.g. 'parses "$query" into params'.
    it.each(examples)('updates history to "http://test.host/$query"', ({ query, input }) => {
      expect(paramsFromQuery(query, defaults)).toEqual({ ...defaults, ...input });
    });
  });
});

View File

@ -3,6 +3,14 @@ import { mount, shallowMount } from '@vue/test-utils';
import ColorPicker from '~/vue_shared/components/color_picker/color_picker.vue';
const BORDER_COLOR_ERROR_MOCK = 'red';
const BORDER_COLOR_DEFAULT_MOCK = 'gray';
jest.mock('~/vue_shared/components/color_picker/constants.js', () => ({
BORDER_COLOR_ERROR: BORDER_COLOR_ERROR_MOCK,
BORDER_COLOR_DEFAULT: BORDER_COLOR_DEFAULT_MOCK,
}));
jest.mock('lodash/uniqueId', () => (prefix) => (prefix ? `${prefix}1` : 1));
describe('ColorPicker', () => {
@ -68,10 +76,11 @@ describe('ColorPicker', () => {
it('by default has no values', () => {
createComponent();
expect(colorPreview().attributes('style')).toBe(undefined);
expect(colorPreview().attributes('style')).toBe(
`border-color: ${BORDER_COLOR_DEFAULT_MOCK};`,
);
expect(colorPicker().props('value')).toBe('');
expect(colorTextInput().props('value')).toBe('');
expect(colorPreview().attributes('class')).toContain('gl-shadow-inner-1-gray-400');
});
it('has a color set on initialization', () => {
@ -92,7 +101,6 @@ describe('ColorPicker', () => {
await colorTextInput().setValue(` ${setColor} `);
expect(wrapper.emitted().input[0]).toStrictEqual([setColor]);
expect(colorPreview().attributes('class')).toContain('gl-shadow-inner-1-gray-400');
expect(colorTextInput().attributes('class')).not.toContain('is-invalid');
});
@ -100,8 +108,8 @@ describe('ColorPicker', () => {
createComponent(mount, { invalidFeedback: invalidText, state: false });
expect(invalidFeedback().text()).toBe(invalidText);
expect(colorPreview().attributes('class')).toContain('gl-shadow-inner-1-red-500');
expect(colorTextInput().attributes('class')).toContain('is-invalid');
expect(colorPreview().attributes('style')).toBe(`border-color: ${BORDER_COLOR_ERROR_MOCK};`);
});
});

View File

@ -63,20 +63,6 @@ RSpec.describe ViteHelper, feature_category: :tooling do
expect(link_tag[:rel]).to eq('stylesheet')
expect(link_tag[:href]).to eq('/vite-dev/stylesheets/styles.application.scss.css')
end
context 'when asset_host is set' do
before do
allow(helper).to receive_message_chain(:config, :asset_host).and_return('http://localhost')
allow(ViteRuby.config).to receive(:host).and_return('localhost')
allow(ViteRuby.config).to receive(:port).and_return(3808)
end
it 'replaces the asset_host with the configured Vite host' do
expect(link_tag[:rel]).to eq('stylesheet')
expect(link_tag[:href]).to eq('http://localhost:3808/vite-dev/stylesheets/styles.application.scss.css')
end
end
end
end
@ -107,4 +93,36 @@ RSpec.describe ViteHelper, feature_category: :tooling do
end
end
end
describe '#vite_origin' do
before do
allow(ViteRuby).to receive_message_chain(:config, :origin).and_return('origin')
end
it { expect(helper.vite_origin).to eq('origin') }
end
describe '#vite_hmr_ws_origin' do
before do
allow(ViteRuby).to receive_message_chain(:config, :host_with_port).and_return('host')
allow(ViteRuby).to receive_message_chain(:config, :https).and_return(https)
end
context 'with https' do
let(:https) { true }
it 'returns wss origin' do
expect(helper.vite_hmr_ws_origin).to eq('wss://host')
end
end
context 'without https' do
let(:https) { false }
it 'returns ws origin' do
allow(ViteRuby).to receive_message_chain(:config, :https).and_return(false)
expect(helper.vite_hmr_ws_origin).to eq('ws://host')
end
end
end
end

View File

@ -1,223 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillMissingNamespaceIdOnNotes,
:migration_with_transaction,
feature_category: :code_review_workflow do
let(:namespaces_table) { table(:namespaces) }
let(:notes_table) { table(:notes) }
let(:projects_table) { table(:projects) }
let(:snippets_table) { table(:snippets) }
let(:users_table) { table(:users) }
let(:epics_table) { table(:epics) }
let(:issues_table) { table(:issues) }
let(:work_item_types_table) { table(:work_item_types) }
let(:organizations_table) { table(:organizations) }
let!(:organization) { organizations_table.create!(name: 'organization', path: 'organization') }
let(:namespace_1) do
namespaces_table.create!(
name: 'namespace',
path: 'namespace-path-1',
organization_id: organization.id
)
end
let(:project_namespace_2) do
namespaces_table.create!(
name: 'namespace',
path: 'namespace-path-2',
type: 'Project',
organization_id: organization.id
)
end
let!(:project_1) do
projects_table
.create!(
name: 'project1',
path: 'path1',
namespace_id: namespace_1.id,
project_namespace_id: project_namespace_2.id,
visibility_level: 0,
organization_id: organization.id
)
end
let!(:user_1) { users_table.create!(name: 'bob', email: 'bob@example.com', projects_limit: 1) }
before do
# This test shares the db connection to establish it's fixtures, resulting in
# incorrect connection usage, so we're skipping it.
# Consult https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180764 for more info.
skip_if_multiple_databases_are_setup(:sec)
end
context "when namespace_id is derived from note.project_id" do
let(:alert_management_alert_note) do
notes_table.create!(project_id: project_1.id, noteable_type: "AlertManagement::Alert")
end
let(:commit_note) { notes_table.create!(project_id: project_1.id, noteable_type: "Commit") }
let(:merge_request_note) { notes_table.create!(project_id: project_1.id, noteable_type: "MergeRequest") }
let(:vulnerability_note) { notes_table.create!(project_id: project_1.id, noteable_type: "Vulnerability") }
let(:design_note) { notes_table.create!(project_id: project_1.id, noteable_type: "Design") }
let(:work_item_note) { notes_table.create!(project_id: project_1.id, noteable_type: "WorkItem") }
let(:issue_note) { notes_table.create!(project_id: project_1.id, noteable_type: "Issue") }
it "updates the namespace_id" do
[
alert_management_alert_note,
commit_note,
merge_request_note,
vulnerability_note,
design_note,
work_item_note,
issue_note
].each do |test_note|
expect(test_note.project_id).not_to be_nil
test_note.update_columns(namespace_id: nil)
test_note.reload
expect(test_note.namespace_id).to be_nil
described_class.new(
start_id: test_note.id,
end_id: test_note.id,
batch_table: :notes,
batch_column: :id,
sub_batch_size: 1,
pause_ms: 0,
connection: ActiveRecord::Base.connection
).perform
test_note.reload
expect(test_note.namespace_id).not_to be_nil
expect(test_note.namespace_id).to eq(Project.find(test_note.project_id).namespace_id)
end
end
end
context "when namespace_id is derived from noteable.author.namespace_id" do
let!(:snippet) do
snippets_table.create!(
author_id: user_1.id,
project_id: project_1.id
)
end
let(:personal_snippet_note) do
notes_table.create!(author_id: user_1.id, noteable_type: "Snippet", noteable_id: snippet.id)
end
let(:project_snippet_note) do
notes_table.create!(author_id: user_1.id, noteable_type: "Snippet", noteable_id: snippet.id)
end
let!(:user_namespace) do
namespaces_table.create!(
name: 'namespace',
path: 'user-namespace-path',
type: 'User',
owner_id: user_1.id,
organization_id: organization.id
)
end
it "updates the namespace_id" do
[project_snippet_note, personal_snippet_note].each do |test_note|
test_note.update_columns(namespace_id: nil)
test_note.reload
expect(test_note.namespace_id).to be_nil
described_class.new(
start_id: test_note.id,
end_id: test_note.id,
batch_table: :notes,
batch_column: :id,
sub_batch_size: 1,
pause_ms: 0,
connection: ActiveRecord::Base.connection
).perform
test_note.reload
expect(test_note.namespace_id).not_to be_nil
expect(test_note.namespace_id).to eq(user_namespace.id)
end
end
end
context "when namespace_id is derived from noteable.id" do
let!(:group_namespace) do
namespaces_table.create!(
name: 'namespace',
path: 'group-namespace-path',
type: 'Group',
owner_id: user_1.id,
organization_id: organization.id
)
end
let!(:work_items_type) do
work_item_types_table.find_by(name: 'Issue')
end
let!(:issue) do
issues_table.create!(
title: "Example Epic",
author_id: user_1.id,
namespace_id: group_namespace.id,
work_item_type_id: work_items_type.id
)
end
let!(:epic) do
epics_table.create!(
title: "Example Epic",
group_id: group_namespace.id,
author_id: user_1.id,
iid: Random.random_number(4000),
title_html: "<blink>Example</blink>",
issue_id: issue.id
)
end
let(:epic_note) do
notes_table.create!(
namespace_id: group_namespace.id,
noteable_type: "Epic",
noteable_id: epic.id
)
end
it "updates the namespace_id" do
[epic_note].each do |test_note|
test_note.update_columns(namespace_id: nil)
test_note.reload
expect(test_note.namespace_id).to be_nil
described_class.new(
start_id: test_note.id,
end_id: test_note.id,
batch_table: :notes,
batch_column: :id,
sub_batch_size: 1,
pause_ms: 0,
connection: ActiveRecord::Base.connection
).perform
test_note.reload
expect(test_note.namespace_id).not_to be_nil
expect(test_note.namespace_id).to eq(group_namespace.id)
end
end
end
end

View File

@ -138,6 +138,27 @@ RSpec.describe Gitlab::Database::Partitioning::ReplaceTable, '#perform', feature
end
end
context 'when the source table is owned by a user with non-alphanumeric characters' do
let(:special_owner) { 'random-table-ownér$#%' }
let(:replace_table_instance) do
described_class.new(connection, original_table, replacement_table, archived_table, 'id')
end
it 'fails when owner name is not quoted' do
unquoted_sql = "ALTER TABLE #{connection.quote_table_name(original_table)} OWNER TO #{special_owner}"
expect do
connection.execute(unquoted_sql)
end.to raise_error(ActiveRecord::StatementInvalid, /syntax error/)
end
it 'properly quotes both table name and owner name' do
sql = replace_table_instance.send(:set_table_owner_statement, original_table, special_owner)
expect(sql).to eq("ALTER TABLE \"#{original_table}\" OWNER TO \"#{special_owner}\"")
end
end
# Spec helper: looks up the PostgresPartition records whose parent is `table`,
# so examples can assert which partitions are attached before/after the
# table-replacement operation under test.
def partitions_for_parent_table(table)
  Gitlab::Database::PostgresPartition.for_parent_table(table)
end

View File

@ -24,12 +24,10 @@ RSpec.describe ViteGdk, feature_category: :tooling do
end.and_return(true)
expect(YAML).to receive(:safe_load_file) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
end.and_return('enabled' => true, 'port' => 3038, 'host' => 'gdk.test')
expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', port: 3038)
end.and_return('enabled' => true, 'port' => 3038, 'host' => '127.0.0.1', 'public_host' => 'gdk.test')
expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', https: false, port: 3038)
expect(ViteRuby.env).to receive(:[]=).with('VITE_ENABLED', 'true')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'gdk.test')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HTTP_URL', 'http://gdk.test:3038')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_WS_URL', 'ws://gdk.test:3038')
described_class.load_gdk_vite_config
end
@ -43,43 +41,50 @@ RSpec.describe ViteGdk, feature_category: :tooling do
}
end
it 'configures ViteRuby with HMR settings' do
it 'ViteRuby uses same host for hmr' do
expect(File).to receive(:exist?) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
end.and_return(true)
expect(YAML).to receive(:safe_load_file) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
end.and_return('enabled' => true, 'port' => 3038, 'host' => 'gdk.test', 'hmr' => hmr_config)
expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', port: 3038)
end.and_return(
'enabled' => true,
'port' => 3038,
'host' => '127.0.0.1',
'public_host' => 'gdk.test',
'hmr' => hmr_config)
expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', https: false, port: 3038)
expect(ViteRuby.env).to receive(:[]=).with('VITE_ENABLED', 'true')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'hmr.gdk.test')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HTTP_URL', 'https://hmr.gdk.test:9999')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_WS_URL', 'wss://hmr.gdk.test:9999')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'gdk.test')
described_class.load_gdk_vite_config
end
end
context 'when HMR config has no port' do
let(:hmr_config) do
context 'when HTTPS config is present' do
let(:https_config) do
{
'host' => 'hmr.gdk.test',
'protocol' => 'wss'
'enabled' => true,
'key' => 'key',
'certificate' => 'certificate'
}
end
it 'configures ViteRuby with default port' do
it 'enables HTTPS' do
expect(File).to receive(:exist?) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
end.and_return(true)
expect(YAML).to receive(:safe_load_file) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
end.and_return('enabled' => true, 'port' => 3038, 'host' => 'gdk.test', 'hmr' => hmr_config)
expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', port: 3038)
end.and_return(
'enabled' => true,
'port' => 3038,
'host' => '127.0.0.1',
'public_host' => 'gdk.test',
'https' => https_config)
expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', https: true, port: 3038)
expect(ViteRuby.env).to receive(:[]=).with('VITE_ENABLED', 'true')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'hmr.gdk.test')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HTTP_URL', 'https://hmr.gdk.test:3038')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_WS_URL', 'wss://hmr.gdk.test:3038')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'gdk.test')
described_class.load_gdk_vite_config
end

View File

@ -359,16 +359,6 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it 'only returns public issuables' do
expect(described_class.without_hidden).not_to include(hidden_merge_request)
end
context 'when feature flag is disabled' do
before do
stub_feature_flags(hide_merge_requests_from_banned_users: false)
end
it 'returns public and hidden issuables' do
expect(described_class.without_hidden).to include(hidden_merge_request)
end
end
end
describe '.merged_without_state_event_source' do
@ -6578,14 +6568,6 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let_it_be(:author) { create(:user, :banned) }
it { is_expected.to eq(true) }
context 'when the feature flag is disabled' do
before do
stub_feature_flags(hide_merge_requests_from_banned_users: false)
end
it { is_expected.to eq(false) }
end
end
end

View File

@ -632,19 +632,5 @@ RSpec.describe MergeRequestPolicy, feature_category: :code_review_workflow do
it 'allows admin to read the merge_request', :enable_admin_mode do
expect(permissions(admin, hidden_merge_request)).to be_allowed(:read_merge_request)
end
context 'when the `hide_merge_requests_from_banned_users` feature flag is disabled' do
before do
stub_feature_flags(hide_merge_requests_from_banned_users: false)
end
it 'allows non-admin users to read the merge_request' do
expect(permissions(user, hidden_merge_request)).to be_allowed(:read_merge_request)
end
it 'allows admin users to read the merge_request', :enable_admin_mode do
expect(permissions(admin, hidden_merge_request)).to be_allowed(:read_merge_request)
end
end
end
end

View File

@ -282,4 +282,39 @@ RSpec.describe ApplicationController, type: :request, feature_category: :shared
end
end
end
context 'when configuring vite' do
let(:vite_hmr_ws_origin) { 'ws://gitlab.example.com:3808' }
let(:vite_origin) { 'http://gitlab.example.com:3808' }
before do
# rubocop:disable RSpec/AnyInstanceOf -- Doesn't work with allow_next_instance_of
allow_any_instance_of(ViteHelper)
.to receive_messages(
vite_enabled?: vite_enabled,
vite_hmr_ws_origin: vite_hmr_ws_origin,
vite_origin: vite_origin,
universal_path_to_stylesheet: '')
# rubocop:enable RSpec/AnyInstanceOf
end
context 'when vite enabled during development' do
let(:vite_enabled) { true }
it 'adds vite csp' do
get root_path
expect(response.headers['Content-Security-Policy']).to include("#{vite_hmr_ws_origin}/vite-dev/")
expect(response.headers['Content-Security-Policy']).to include("#{vite_origin}/vite-dev/")
end
end
context 'when vite is disabled' do
let(:vite_enabled) { false }
it "doesn't add vite csp" do
get root_path
expect(response.headers['Content-Security-Policy']).not_to include('/vite-dev/')
end
end
end
end

View File

@ -44,7 +44,7 @@ RSpec.describe 'Merge Requests Diffs stream', feature_category: :code_review_wor
context 'when accessed' do
it 'passes hash of options to #diffs_for_streaming' do
expect_next_instance_of(::Projects::MergeRequests::DiffsStreamController) do |controller|
context = {}
context = controller.view_context
allow(controller).to receive(:view_context).and_return(context)
expect(controller).to receive(:stream_diff_files)
.with(diff_options_hash, context)

View File

@ -5,7 +5,7 @@ module ContentSecurityPolicyHelpers
# 1. call that's being tested
# 2. call in ApplicationController
def setup_csp_for_controller(
controller_class, csp = ActionDispatch::ContentSecurityPolicy.new, times: 2,
controller_class, csp = ActionDispatch::ContentSecurityPolicy.new, times: 3,
any_time: false)
expect_next_instance_of(controller_class) do |controller|
if any_time

View File

@ -45,45 +45,6 @@ RSpec.shared_examples 'Base action controller' do
it_behaves_like 'snowplow is not in the CSP'
end
end
context 'when configuring vite' do
let(:vite_hmr_websocket_url) { "ws://gitlab.example.com:3808" }
let(:vite_hmr_http_url) { "http://gitlab.example.com:3808" }
let(:vite_gitlab_url) { Gitlab::Utils.append_path(Gitlab.config.gitlab.url, 'vite-dev/') }
context 'when vite enabled during development',
skip: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424334' do
before do
stub_rails_env('development')
allow(ViteHelper).to receive(:vite_enabled?).and_return(true)
allow(BaseActionController.helpers).to receive(:vite_enabled?).and_return(true)
allow(BaseActionController.helpers).to receive(:vite_hmr_websocket_url).and_return(vite_hmr_websocket_url)
allow(BaseActionController.helpers).to receive(:vite_hmr_http_url).and_return(vite_hmr_http_url)
end
it 'adds vite csp' do
request
expect(response.headers['Content-Security-Policy']).to include("#{vite_hmr_websocket_url}/vite-dev/")
expect(response.headers['Content-Security-Policy']).to include("#{vite_hmr_http_url}/vite-dev/")
expect(response.headers['Content-Security-Policy']).to include(vite_gitlab_url)
end
end
context 'when vite disabled' do
before do
allow(BaseActionController.helpers).to receive(:vite_enabled?).and_return(false)
end
it "doesn't add vite csp" do
request
expect(response.headers['Content-Security-Policy']).not_to include(vite_hmr_websocket_url)
expect(response.headers['Content-Security-Policy']).not_to include(vite_hmr_http_url)
expect(response.headers['Content-Security-Policy']).not_to include(vite_gitlab_url)
end
end
end
end
end
end

View File

@ -124,9 +124,7 @@ export default defineConfig({
'process.env.GITLAB_WEB_IDE_PUBLIC_PATH': JSON.stringify(GITLAB_WEB_IDE_PUBLIC_PATH),
'window.IS_VITE': JSON.stringify(true),
'window.VUE_DEVTOOLS_CONFIG.openInEditorHost': JSON.stringify(
viteGDKConfig.hmr
? `${process.env.VITE_HMR_HTTP_URL}/vite-dev/`
: `http://${viteGDKConfig.host}:${viteGDKConfig.port}/vite-dev/`,
`${viteGDKConfig.https?.enabled ? 'https' : 'http'}://${viteGDKConfig.public_host}:${viteGDKConfig.port}/vite-dev/`,
),
'process.env.PDF_JS_WORKER_PUBLIC_PATH': JSON.stringify(PDF_JS_WORKER_PUBLIC_PATH),
'process.env.PDF_JS_CMAPS_UBLIC_PATH': JSON.stringify(PDF_JS_CMAPS_PUBLIC_PATH),
@ -136,8 +134,12 @@ export default defineConfig({
warmup: {
clientFiles: ['javascripts/entrypoints/main.js', 'javascripts/entrypoints/super_sidebar.js'],
},
hmr: viteGDKConfig.hmr,
https: false,
https: viteGDKConfig.https?.enabled
? {
key: viteGDKConfig.https?.key,
cert: viteGDKConfig.https?.certificate,
}
: false,
watch:
viteGDKConfig.hmr === null
? null