+
{
- if Feature.enabled?(:hide_merge_requests_from_banned_users)
- where_not_exists(Users::BannedUser.where('merge_requests.author_id = banned_users.user_id'))
- else
- all
- end
+ where_not_exists(Users::BannedUser.where('merge_requests.author_id = banned_users.user_id'))
}
scope :merged_without_state_event_source, -> {
@@ -2451,7 +2447,7 @@ class MergeRequest < ApplicationRecord
end
def hidden?
- Feature.enabled?(:hide_merge_requests_from_banned_users) && author&.banned?
+ author&.banned?
end
def diffs_batch_cache_with_max_age?
diff --git a/config/feature_flags/development/hide_merge_requests_from_banned_users.yml b/config/feature_flags/development/hide_merge_requests_from_banned_users.yml
deleted file mode 100644
index 7ba8475e607..00000000000
--- a/config/feature_flags/development/hide_merge_requests_from_banned_users.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: hide_merge_requests_from_banned_users
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107836
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/386726
-milestone: "15.8"
-type: development
-group: group::authorization
-default_enabled: false
diff --git a/config/vite.json b/config/vite.json
index 0d9584e734b..998de2f931d 100644
--- a/config/vite.json
+++ b/config/vite.json
@@ -14,7 +14,7 @@
"ee/images/*",
"jh/images/*"
],
- "port": 3038,
+ "skipProxy": true,
"publicOutputDir": "vite-dev",
"devServerConnectTimeout": 3
}
diff --git a/db/migrate/20250423075634_cleanup_backfill_missing_namespace_id_on_notes.rb b/db/migrate/20250423075634_cleanup_backfill_missing_namespace_id_on_notes.rb
new file mode 100644
index 00000000000..4e039b98167
--- /dev/null
+++ b/db/migrate/20250423075634_cleanup_backfill_missing_namespace_id_on_notes.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class CleanupBackfillMissingNamespaceIdOnNotes < Gitlab::Database::Migration[2.2]
+ milestone '18.0'
+
+ MIGRATION = 'BackfillMissingNamespaceIdOnNotes'
+
+ restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+ def up
+ # rubocop:disable Migration/BatchMigrationsPostOnly -- Run in a regular migration rather than a post-deployment
+ # migration so the batched migration is deleted before it can be enqueued
+ delete_batched_background_migration(MIGRATION, :notes, :id, [])
+ # rubocop:enable Migration/BatchMigrationsPostOnly
+ end
+
+ def down; end
+end
diff --git a/db/post_migrate/20240822220027_queue_backfill_missing_namespace_id_on_notes.rb b/db/post_migrate/20240822220027_queue_backfill_missing_namespace_id_on_notes.rb
index c83f1a97a46..9928b5dbd6e 100644
--- a/db/post_migrate/20240822220027_queue_backfill_missing_namespace_id_on_notes.rb
+++ b/db/post_migrate/20240822220027_queue_backfill_missing_namespace_id_on_notes.rb
@@ -17,19 +17,11 @@ class QueueBackfillMissingNamespaceIdOnNotes < Gitlab::Database::Migration[2.2]
GITLAB_OPTIMIZED_BATCH_SIZE = 75_000
GITLAB_OPTIMIZED_SUB_BATCH_SIZE = 250
- def up
- queue_batched_background_migration(
- MIGRATION,
- :notes,
- :id,
- job_interval: DELAY_INTERVAL,
- **batch_sizes
- )
- end
+ # No longer needed because we now backfill only non-project notes
+ # gitlab.com/gitlab-org/gitlab/-/issues/444222
+ def up; end
- def down
- delete_batched_background_migration(MIGRATION, :notes, :id, [])
- end
+ def down; end
private
diff --git a/db/schema_migrations/20250423075634 b/db/schema_migrations/20250423075634
new file mode 100644
index 00000000000..f1aa0d4b7c5
--- /dev/null
+++ b/db/schema_migrations/20250423075634
@@ -0,0 +1 @@
+217682c74afb328af59dd6cf7c5283ed485b9ba7868d34881e1c096aaee5398f
\ No newline at end of file
diff --git a/doc/administration/moderate_users.md b/doc/administration/moderate_users.md
index 9569e7d4222..336f5d1f43f 100644
--- a/doc/administration/moderate_users.md
+++ b/doc/administration/moderate_users.md
@@ -307,6 +307,7 @@ Users can also be reactivated using the [GitLab API](../api/user_moderation.md#r
- Hiding merge requests of banned users [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107836) in GitLab 15.8 [with a flag](feature_flags.md) named `hide_merge_requests_from_banned_users`. Disabled by default.
- Hiding comments of banned users [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/112973) in GitLab 15.11 [with a flag](feature_flags.md) named `hidden_notes`. Disabled by default.
- Hiding projects of banned users [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121488) in GitLab 16.2 [with a flag](feature_flags.md) named `hide_projects_of_banned_users`. Disabled by default.
+- Hiding merge requests of banned users [generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188770) in GitLab 18.0. Feature flag `hide_merge_requests_from_banned_users` removed.
{{< /history >}}
diff --git a/doc/integration/advanced_search/elasticsearch.md b/doc/integration/advanced_search/elasticsearch.md
index a5a250354ba..ca096ecdf78 100644
--- a/doc/integration/advanced_search/elasticsearch.md
+++ b/doc/integration/advanced_search/elasticsearch.md
@@ -799,9 +799,11 @@ scoped to a group or project return no results.
## Advanced search migrations
-With reindex migrations running in the background, there's no need for a manual
-intervention. This usually happens in situations where new features are added to
-advanced search, which means adding or changing the way content is indexed.
+Reindex migrations run in the background, so
+you do not have to reindex the instance manually.
+Use the `elastic_migration_worker_enabled` application setting
+to turn the migration worker on or off.
+By default, the migration worker is on.
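+
+For example, to turn the migration worker off from the Rails console
+(a minimal sketch; this assumes the setting is writable on `ApplicationSetting`):
+
+```ruby
+# Hypothetical Rails console snippet: pause Advanced Search migrations
+# by turning the migration worker off.
+ApplicationSetting.current.update!(elastic_migration_worker_enabled: false)
+
+# Turn the worker back on when migrations should resume.
+ApplicationSetting.current.update!(elastic_migration_worker_enabled: true)
+```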
### Migration dictionary files
diff --git a/doc/security/two_factor_authentication.md b/doc/security/two_factor_authentication.md
index 1f710e0a061..078c1296e71 100644
--- a/doc/security/two_factor_authentication.md
+++ b/doc/security/two_factor_authentication.md
@@ -251,7 +251,7 @@ On GitLab Self-Managed, by default this feature is not available. To make it ava
{{< /alert >}}
You can enforce 2FA for [Git over SSH operations](../development/gitlab_shell/features.md#git-operations). However, you should use
-[ED25519_SK](../user/ssh.md#ed25519_sk-ssh-keys) or [ECDSA_SK](../user/ssh.md#ecdsa_sk-ssh-keys) SSH keys instead. 2FA is enforced for Git operations only, and internal commands such as [`personal_access_token`](../development/gitlab_shell/features.md#personal-access-token) are excluded.
+[ED25519_SK](../user/ssh.md#ed25519_sk-ssh-keys) or [ECDSA_SK](../user/ssh.md#ecdsa_sk-ssh-keys) SSH keys instead. 2FA is enforced for Git operations only, and internal commands from GitLab Shell such as `personal_access_token` are excluded.
To perform one-time password (OTP) verification, run:
diff --git a/lib/gitlab/background_migration/backfill_missing_namespace_id_on_notes.rb b/lib/gitlab/background_migration/backfill_missing_namespace_id_on_notes.rb
deleted file mode 100644
index 58e29346a3a..00000000000
--- a/lib/gitlab/background_migration/backfill_missing_namespace_id_on_notes.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# frozen_string_literal: true
-
-module Gitlab
- module BackgroundMigration
- class BackfillMissingNamespaceIdOnNotes < BatchedMigrationJob
- operation_name :backfill_missing_namespace_id_on_notes
- feature_category :code_review_workflow
-
- def perform
- each_sub_batch do |sub_batch|
- Gitlab::Database.allow_cross_joins_across_databases(
- url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163687'
- ) do
- connection.execute(build_query(sub_batch))
- end
- end
- end
-
- private
-
- # rubocop:disable Layout/LineLength -- SQL!
- # rubocop:disable Metrics/MethodLength -- I do what I want
- def build_query(scope)
- records_query = scope.where(namespace_id: nil).select("
- id,
- (
- coalesce(
- (case
- when exists (select 1 from projects where id = notes.project_id) then (select namespace_id from projects where id = notes.project_id)
- when noteable_type = 'AlertManagement::Alert' then (select namespace_id from projects where id = (select project_id from alert_management_alerts where noteable_id = notes.id limit 1) limit 1)
- when noteable_type = 'MergeRequest' then (select namespace_id from projects where id = (select project_id from merge_requests where noteable_id = notes.id limit 1) limit 1)
- when noteable_type = 'Vulnerability' then (select namespace_id from projects where id = (select project_id from vulnerabilities where noteable_id = notes.id limit 1) limit 1)
- -- These 2 need to pull namespace_id from the noteable
- when noteable_type = 'DesignManagement::Design' then (select namespace_id from design_management_designs where id = notes.noteable_id limit 1)
- when noteable_type = 'Issue' then (select namespace_id from issues where id = notes.noteable_id limit 1)
- -- Epics pull in group_id
- when noteable_type = 'Epic' then (select group_id from epics where id = notes.noteable_id limit 1)
- -- Snippets pull from author
- when noteable_type = 'Snippet' then (select id from namespaces where owner_id = (select author_id from notes where id = notes.id limit 1) limit 1)
- -- Commits pull namespace_id from the project of the note
- when noteable_type = 'Commit' then (select namespace_id from projects where id = notes.project_id limit 1)
- else
- -1
- end
- ), -1)) as namespace_id_to_set
- ")
-
- <<~SQL
- with records AS (
- #{records_query.to_sql}
- ), updated_rows as (
- -- updating records with the located namespace_id_to_set value
- update notes set namespace_id = namespace_id_to_set from records where records.id=notes.id and namespace_id_to_set <> -1
- ), deleted_rows as (
- -- deleting the records where we couldn't find the namespace id
- delete from notes where id IN (select id from records where namespace_id_to_set = -1)
- )
- select 1
- SQL
- end
- # rubocop:enable Layout/LineLength
- # rubocop:enable Metrics/MethodLength
-
- def backfillable?(note)
- note.noteable_type.present?
- end
-
- def extract_namespace_id(note)
- # Attempt to find namespace_id from the project first.
- #
- if note.project_id
- project = Project.find_by_id(note.project_id)
-
- return project.namespace_id if project
- end
-
- # We have to load the noteable here because we don't have access to the
- # usual ActiveRecord relationships to do it for us.
- #
- noteable = note.noteable_type.constantize.find(note.noteable_id)
-
- case note.noteable_type
- when "AlertManagement::Alert", "Commit", "MergeRequest", "Vulnerability"
- noteable.project.namespace_id
- when "DesignManagement::Design", "Epic", "Issue"
- noteable.namespace_id
- when "Snippet"
- noteable.author.namespace_id
- end
- end
- end
- end
-end
diff --git a/lib/vite_gdk.rb b/lib/vite_gdk.rb
index 9b70fca0146..28ab1de9415 100644
--- a/lib/vite_gdk.rb
+++ b/lib/vite_gdk.rb
@@ -15,23 +15,13 @@ module ViteGdk
return unless enabled
# From https://vitejs.dev/config/server-options
- host = config['host'] || 'localhost'
- port = Integer(config['port'] || 3808)
- hmr_config = config['hmr'] || {}
- hmr_host = hmr_config['host'] || host
- hmr_port = hmr_config['clientPort'] || hmr_config['port'] || port
- hmr_ws_protocol = hmr_config['protocol'] || 'ws'
- hmr_http_protocol = hmr_ws_protocol == 'wss' ? 'https' : 'http'
- ViteRuby.env['VITE_HMR_HOST'] = hmr_host
- # If the Websocket connection to the HMR host is not up, Vite will attempt to
- # ping the HMR host via HTTP or HTTPS:
- # https://github.com/vitejs/vite/blob/899d9b1d272b7057aafc6fa01570d40f288a473b/packages/vite/src/client/client.ts#L320-L327
- ViteRuby.env['VITE_HMR_HTTP_URL'] = "#{hmr_http_protocol}://#{hmr_host}:#{hmr_port}"
- ViteRuby.env['VITE_HMR_WS_URL'] = "#{hmr_ws_protocol}://#{hmr_host}:#{hmr_port}"
+ host = config['public_host'] || 'localhost'
+ ViteRuby.env['VITE_HMR_HOST'] = host
ViteRuby.configure(
host: host,
- port: port
+ port: Integer(config['port'] || 3808),
+ https: config.fetch('https', { 'enabled' => false })['enabled']
)
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 3ffba62386e..777cbc448b4 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -5723,6 +5723,9 @@ msgstr ""
msgid "AiPowered|Connection method"
msgstr ""
+msgid "AiPowered|Contact sales for Duo Enterprise"
+msgstr ""
+
msgid "AiPowered|Direct connections"
msgstr ""
@@ -5771,6 +5774,9 @@ msgstr ""
msgid "AiPowered|GitLab Duo Core available to all users"
msgstr ""
+msgid "AiPowered|GitLab Duo Pro or Enterprise"
+msgstr ""
+
msgid "AiPowered|GitLab Duo Self-Hosted"
msgstr ""
@@ -5810,6 +5816,9 @@ msgstr ""
msgid "AiPowered|Participate in the Early Access Program and help make GitLab better"
msgstr ""
+msgid "AiPowered|Purchase Duo Pro seats"
+msgstr ""
+
msgid "AiPowered|Seat assignment for GitLab Duo has moved"
msgstr ""
@@ -5825,9 +5834,18 @@ msgstr ""
msgid "AiPowered|Start date: %{startDate}"
msgstr ""
+msgid "AiPowered|Tanuki AI icon"
+msgstr ""
+
msgid "AiPowered|Turn on experiment and beta GitLab Duo features"
msgstr ""
+msgid "AiPowered|Unlock advanced AI-powered capabilities with the Premium or Ultimate tier designed for your development needs."
+msgstr ""
+
+msgid "AiPowered|Upgrade to"
+msgstr ""
+
msgid "AiPowered|Use beta models and features in GitLab Duo Self-Hosted"
msgstr ""
@@ -30823,6 +30841,9 @@ msgstr ""
msgid "How do I use a web terminal?"
msgstr ""
+msgid "How does it work?"
+msgstr ""
+
msgid "How does pull mirroring work?"
msgstr ""
@@ -35534,12 +35555,6 @@ msgstr ""
msgid "Learn more about GitLab"
msgstr ""
-msgid "Learn more about Root Cause Analysis"
-msgstr ""
-
-msgid "Learn more about Root Cause Analysis in new tab"
-msgstr ""
-
msgid "Learn more about Service Desk"
msgstr ""
@@ -49591,9 +49606,6 @@ msgstr ""
msgid "Quick start guide"
msgstr ""
-msgid "Quickly identify the root cause of an incident using AI-assisted analysis."
-msgstr ""
-
msgid "README"
msgstr ""
@@ -51347,9 +51359,6 @@ msgstr ""
msgid "Rollback"
msgstr ""
-msgid "Root Cause Analysis"
-msgstr ""
-
msgid "Root Moved Permanently redirection response"
msgstr ""
@@ -61163,6 +61172,9 @@ msgstr ""
msgid "There are no closed merge requests"
msgstr ""
+msgid "There are no comments to summarize."
+msgstr ""
+
msgid "There are no commits yet"
msgstr ""
@@ -62173,9 +62185,6 @@ msgstr ""
msgid "This repository was last checked %{last_check_timestamp}. The check passed."
msgstr ""
-msgid "This resource has no comments to summarize"
-msgstr ""
-
msgid "This setting can be overridden in each project."
msgstr ""
@@ -63934,6 +63943,9 @@ msgstr ""
msgid "Troubleshoot failed CI/CD jobs with Root Cause Analysis."
msgstr ""
+msgid "Troubleshoot failed jobs with Root Cause Analysis"
+msgstr ""
+
msgid "True"
msgstr ""
@@ -65163,6 +65175,9 @@ msgstr ""
msgid "Use .gitlab-ci.yml"
msgstr ""
+msgid "Use AI to quickly identify the cause of job failures and get example fixes to get your pipeline running."
+msgstr ""
+
msgid "Use Amazon Q to streamline development workflow and project upgrades"
msgstr ""
diff --git a/spec/features/jira_connect/subscriptions_spec.rb b/spec/features/jira_connect/subscriptions_spec.rb
index 8686234df01..d5de37c7f6e 100644
--- a/spec/features/jira_connect/subscriptions_spec.rb
+++ b/spec/features/jira_connect/subscriptions_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Subscriptions Content Security Policy', feature_category: :integ
let(:qsh) { Atlassian::Jwt.create_query_string_hash('https://gitlab.test/subscriptions', 'GET', 'https://gitlab.test') }
let(:jwt) { Atlassian::Jwt.encode({ iss: installation.client_key, qsh: qsh }, installation.shared_secret) }
- subject { response_headers['Content-Security-Policy'] }
+ subject(:csp) { parse_csp response_headers['Content-Security-Policy'] }
context 'when there is no global config' do
before do
@@ -36,9 +36,19 @@ RSpec.describe 'Subscriptions Content Security Policy', feature_category: :integ
it 'appends to CSP directives' do
visit jira_connect_subscriptions_path(jwt: jwt)
- is_expected.to include("frame-ancestors 'self' https://*.atlassian.net https://*.jira.com")
- is_expected.to include("script-src 'self' https://some-cdn.test https://connect-cdn.atl-paas.net")
- is_expected.to include("style-src 'self' https://some-cdn.test 'unsafe-inline'")
+ frame_ancestors = "'self' https://*.atlassian.net https://*.jira.com".split(' ')
+ script_src = "'self' https://some-cdn.test https://connect-cdn.atl-paas.net".split(' ')
+ style_src = "'self' https://some-cdn.test 'unsafe-inline'".split(' ')
+ expect(csp['frame-ancestors']).to include(*frame_ancestors)
+ expect(csp['script-src']).to include(*script_src)
+ expect(csp['style-src']).to include(*style_src)
+ end
+ end
+
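+ # Parses a Content-Security-Policy header into { directive => [sources] }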
+ def parse_csp(csp)
+ csp.split(';').reject { |dir| dir.strip.empty? }.each_with_object({}) do |dir, hash|
+ parts = dir.strip.split(/\s+/)
+ hash[parts.first] = parts[1..]
end
end
end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 159f750607e..5e51be9b27c 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -1368,13 +1368,5 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
it { is_expected.to include(banned_merge_request) }
end
-
- context 'when the `hide_merge_requests_from_banned_users` feature flag is disabled' do
- before do
- stub_feature_flags(hide_merge_requests_from_banned_users: false)
- end
-
- it { is_expected.to include(banned_merge_request) }
- end
end
end
diff --git a/spec/frontend/groups_projects/components/tab_view_spec.js b/spec/frontend/groups_projects/components/tab_view_spec.js
index 4a1c615673f..dea5a581ec8 100644
--- a/spec/frontend/groups_projects/components/tab_view_spec.js
+++ b/spec/frontend/groups_projects/components/tab_view_spec.js
@@ -82,7 +82,7 @@ describe('TabView', () => {
});
apolloClient = mockApollo.defaultClient;
- jest.spyOn(apolloClient, 'resetStore');
+ jest.spyOn(apolloClient, 'clearStore');
};
const findProjectsList = () => wrapper.findComponent(ProjectsList);
@@ -154,10 +154,16 @@ describe('TabView', () => {
findProjectsList().vm.$emit('refetch');
});
- it('resets store and refetches list', () => {
- expect(apolloClient.resetStore).toHaveBeenCalled();
+ it('clears store and refetches list', async () => {
+ expect(apolloClient.clearStore).toHaveBeenCalled();
+ await waitForPromises();
expect(handler[1]).toHaveBeenCalledTimes(2);
});
+
+ it('emits refetch event', async () => {
+ await waitForPromises();
+ expect(wrapper.emitted('refetch')).toEqual([[]]);
+ });
});
});
diff --git a/spec/frontend/groups_projects/components/tabs_with_list_spec.js b/spec/frontend/groups_projects/components/tabs_with_list_spec.js
index b6333fc3cc9..90ca781a5e8 100644
--- a/spec/frontend/groups_projects/components/tabs_with_list_spec.js
+++ b/spec/frontend/groups_projects/components/tabs_with_list_spec.js
@@ -727,4 +727,18 @@ describe('TabsWithList', () => {
expect(findTabView().props('timestampType')).toBe(expectedTimestampType);
});
});
+
+ describe('when refetch event is fired', () => {
+ beforeEach(async () => {
+ await createComponent();
+ await waitForPromises();
+ await mockApollo.defaultClient.clearStore();
+ findTabView().vm.$emit('refetch');
+ await waitForPromises();
+ });
+
+ it('refetches tab counts', () => {
+ expect(successHandler).toHaveBeenCalledTimes(2);
+ });
+ });
});
diff --git a/spec/frontend/projects/pipelines/charts/components/branch_collapsible_listbox_spec.js b/spec/frontend/projects/pipelines/charts/components/branch_collapsible_listbox_spec.js
index 1f8306507c9..1aefec7e0c5 100644
--- a/spec/frontend/projects/pipelines/charts/components/branch_collapsible_listbox_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/branch_collapsible_listbox_spec.js
@@ -6,6 +6,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import BranchCollapsibleListbox from '~/projects/pipelines/charts/components/branch_collapsible_listbox.vue';
import getBranchesOptionsQuery from '~/projects/pipelines/charts/graphql/queries/get_branches_options.query.graphql';
+import { BRANCH_ANY } from '~/projects/pipelines/charts/constants';
import { createAlert } from '~/alert';
jest.mock('~/alert');
@@ -63,7 +64,7 @@ describe('Pipeline editor branch switcher', () => {
infiniteScroll: false,
loading: false,
infiniteScrollLoading: true,
- items: [{ text: 'All branches', value: '' }],
+ items: [{ text: 'All branches', value: BRANCH_ANY }],
searchPlaceholder: 'Filter by branch name',
searchable: true,
searching: false,
@@ -96,7 +97,7 @@ describe('Pipeline editor branch switcher', () => {
expect(findGlCollapsibleListbox().exists()).toBe(true);
expect(findGlCollapsibleListbox().props('items')).toEqual([
- { text: 'All branches', value: '' },
+ { text: 'All branches', value: BRANCH_ANY },
{ text: 'main', value: 'main' },
{ text: 'feature-branch', value: 'feature-branch' },
]);
@@ -155,7 +156,7 @@ describe('Pipeline editor branch switcher', () => {
it('updates items', () => {
expect(findGlCollapsibleListbox().props('items')).toEqual([
- { text: 'All branches', value: '' },
+ { text: 'All branches', value: BRANCH_ANY },
{ text: 'main', value: 'main' },
{ text: 'feature-branch', value: 'feature-branch' },
{ text: 'feature-branch-2', value: 'feature-branch-2' },
@@ -193,7 +194,6 @@ describe('Pipeline editor branch switcher', () => {
it('shows selected branch', () => {
expect(findGlCollapsibleListbox().props('selected')).toBe('feature-branch');
- expect(findGlCollapsibleListbox().props('toggleText')).toBe('feature-branch');
expect(findListboxItem(2).text()).toBe('feature-branch');
expect(findListboxItem(2).props('isSelected')).toBe(true);
@@ -211,7 +211,7 @@ describe('Pipeline editor branch switcher', () => {
it('shows "All" option', () => {
expect(findGlCollapsibleListbox().props('items')).toEqual([
- { text: 'All branches', value: '' },
+ { text: 'All branches', value: BRANCH_ANY },
]);
});
diff --git a/spec/frontend/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_filters_spec.js b/spec/frontend/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_filters_spec.js
index a48703507a6..75669b29fb9 100644
--- a/spec/frontend/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_filters_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_filters_spec.js
@@ -29,8 +29,28 @@ describe('PipelinesDashboardClickhouseFilters', () => {
};
describe('input', () => {
- it('does not emit immediately', () => {
- createComponent();
+ beforeEach(() => {
+ createComponent({
+ props: { value: { source: 'PUSH', dateRange: '30d', branch: 'my-branch-0' } },
+ });
+ });
+
+ it('sets values, and does not emit @input', () => {
+ expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('PUSH');
+ expect(findBranchCollapsibleListbox().props('selected')).toBe('my-branch-0');
+ expect(findCollapsibleListbox('date-range').props('selected')).toBe('30d');
+
+ expect(wrapper.emitted('input')).toBeUndefined();
+ });
+
+ it('reacts to changes in value, and does not emit @input', async () => {
+ wrapper.setProps({ value: { source: 'SCHEDULE', dateRange: '180d', branch: 'my-branch-1' } });
+ await nextTick();
+
+ expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('SCHEDULE');
+ expect(findBranchCollapsibleListbox().props('selected')).toBe('my-branch-1');
+ expect(findCollapsibleListbox('date-range').props('selected')).toBe('180d');
+
expect(wrapper.emitted('input')).toBeUndefined();
});
});
@@ -70,7 +90,7 @@ describe('PipelinesDashboardClickhouseFilters', () => {
});
it('is "Any" by default', () => {
- expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('ANY');
+ expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe(null);
});
it('sets selected value', () => {
@@ -94,7 +114,7 @@ describe('PipelinesDashboardClickhouseFilters', () => {
},
});
- expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe('ANY');
+ expect(findCollapsibleListbox('pipeline-source').props('selected')).toBe(null);
});
it('emits when an option is selected', async () => {
@@ -103,7 +123,7 @@ describe('PipelinesDashboardClickhouseFilters', () => {
await nextTick();
expect(wrapper.emitted('input')[0][0]).toEqual({
- branch: defaultBranch,
+ branch: null,
dateRange: '7d',
source: 'PUSH',
});
@@ -117,15 +137,15 @@ describe('PipelinesDashboardClickhouseFilters', () => {
it('shows listbox with default branch as default value', () => {
expect(findBranchCollapsibleListbox().props()).toMatchObject({
- selected: defaultBranch,
+ selected: null,
defaultBranch,
projectPath,
projectBranchCount,
});
});
- it('is the default branch by default', () => {
- expect(findBranchCollapsibleListbox().props('selected')).toBe(defaultBranch);
+ it('is no branch by default', () => {
+ expect(findBranchCollapsibleListbox().props('selected')).toBe(null);
});
it('sets selected value', () => {
@@ -148,7 +168,7 @@ describe('PipelinesDashboardClickhouseFilters', () => {
expect(wrapper.emitted('input')[0][0]).toEqual({
branch: 'my-branch-1',
dateRange: '7d',
- source: 'ANY',
+ source: null,
});
});
});
@@ -189,8 +209,8 @@ describe('PipelinesDashboardClickhouseFilters', () => {
expect(wrapper.emitted('input')[0][0]).toEqual({
dateRange: '90d',
- branch: defaultBranch,
- source: 'ANY',
+ branch: null,
+ source: null,
});
});
});
diff --git a/spec/frontend/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_spec.js b/spec/frontend/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_spec.js
index 6bf765f338a..9bf1c69903e 100644
--- a/spec/frontend/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_spec.js
@@ -7,10 +7,11 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelinesDashboardClickhouse from '~/projects/pipelines/charts/components/pipelines_dashboard_clickhouse.vue';
import {
- SOURCE_ANY,
SOURCE_PUSH,
- DATE_RANGE_7_DAYS,
+ BRANCH_ANY,
+ DATE_RANGE_DEFAULT,
DATE_RANGE_30_DAYS,
+ DATE_RANGE_180_DAYS,
} from '~/projects/pipelines/charts/constants';
import PipelinesDashboardClickhouseFilters from '~/projects/pipelines/charts/components/pipelines_dashboard_clickhouse_filters.vue';
import StatisticsList from '~/projects/pipelines/charts/components/statistics_list.vue';
@@ -18,12 +19,19 @@ import PipelineDurationChart from '~/projects/pipelines/charts/components/pipeli
import PipelineStatusChart from '~/projects/pipelines/charts/components/pipeline_status_chart.vue';
import getPipelineAnalyticsQuery from '~/projects/pipelines/charts/graphql/queries/get_pipeline_analytics.query.graphql';
import { createAlert } from '~/alert';
+import { updateHistory } from '~/lib/utils/url_utility';
import { useFakeDate } from 'helpers/fake_date';
import { pipelineAnalyticsEmptyData, pipelineAnalyticsData } from 'jest/analytics/ci_cd/mock_data';
+import setWindowLocation from 'helpers/set_window_location_helper';
Vue.use(VueApollo);
jest.mock('~/alert');
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ updateHistory: jest.fn(),
+}));
+
const projectPath = 'gitlab-org/gitlab';
const defaultBranch = 'main';
const projectBranchCount = 99;
@@ -73,55 +81,139 @@ describe('PipelinesDashboardClickhouse', () => {
});
describe('filters', () => {
- beforeEach(() => {
- createComponent();
- });
+ describe('default filters', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- it('sets default filters', () => {
- expect(findPipelinesDashboardClickhouseFilters().props()).toEqual({
- defaultBranch: 'main',
- projectBranchCount: 99,
- projectPath: 'gitlab-org/gitlab',
- value: {
- source: SOURCE_ANY,
+ it('sets default filters', () => {
+ expect(findPipelinesDashboardClickhouseFilters().props()).toEqual({
+ defaultBranch,
+ projectBranchCount: 99,
+ projectPath: 'gitlab-org/gitlab',
+ value: {
+ source: null,
+ branch: defaultBranch,
+ dateRange: DATE_RANGE_DEFAULT,
+ },
+ });
+ });
+
+ it('requests with default filters', async () => {
+ await waitForPromises();
+
+ expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(1);
+ expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
+ fullPath: projectPath,
+ source: null,
branch: defaultBranch,
- dateRange: DATE_RANGE_7_DAYS,
+ fromTime: new Date('2022-02-08'),
+ toTime: new Date('2022-02-15'),
+ });
+ });
+ });
+
+ describe('filters can be bookmarked', () => {
+ const tests = [
+ {
+ name: 'only default branch',
+ input: {
+ source: null,
+ dateRange: DATE_RANGE_DEFAULT,
+ branch: defaultBranch,
+ },
+ variables: {
+ source: null,
+ fullPath: projectPath,
+ branch: defaultBranch,
+ fromTime: new Date('2022-02-08'),
+ toTime: new Date('2022-02-15'),
+ },
+ query: '',
},
- });
+ {
+ name: 'the last 30 days',
+ input: {
+ source: null,
+ dateRange: DATE_RANGE_30_DAYS,
+ branch: BRANCH_ANY,
+ },
+ variables: {
+ source: null,
+ fullPath: projectPath,
+ branch: null,
+ fromTime: new Date('2022-01-16'),
+ toTime: new Date('2022-02-15'),
+ },
+ query: '?branch=~any&time=30d',
+ },
+ {
+ name: 'feature branch pushes in the last 180 days',
+ input: {
+ source: SOURCE_PUSH,
+ dateRange: DATE_RANGE_180_DAYS,
+ branch: 'feature-branch',
+ },
+ variables: {
+ source: SOURCE_PUSH,
+ fullPath: projectPath,
+ branch: 'feature-branch',
+ fromTime: new Date('2021-08-19'),
+ toTime: new Date('2022-02-15'),
+ },
+ query: '?branch=feature-branch&source=PUSH&time=180d',
+ },
+ ];
+
+ it.each(tests)(
+ 'filters by "$name", updating query to "$query"',
+ async ({ input, variables, query }) => {
+ createComponent();
+ findPipelinesDashboardClickhouseFilters().vm.$emit('input', input);
+
+ await waitForPromises();
+
+ expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith(variables);
+
+ expect(updateHistory).toHaveBeenLastCalledWith({ url: `http://test.host/${query}` });
+ },
+ );
+
+ it.each(tests)(
+ 'with query "$query", filters by "$name"',
+ async ({ input, variables, query }) => {
+ setWindowLocation(query);
+ createComponent();
+
+ await waitForPromises();
+
+ expect(findPipelinesDashboardClickhouseFilters().props('value')).toEqual(input);
+ expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith(variables);
+ },
+ );
+
+ it.each(tests)(
+ 'responds to history back button for "$query" to filter by "$name"',
+ async ({ input, variables, query }) => {
+ createComponent();
+
+ setWindowLocation(query);
+ window.dispatchEvent(new Event('popstate'));
+ await waitForPromises();
+
+ expect(findPipelinesDashboardClickhouseFilters().props('value')).toEqual(input);
+ expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith(variables);
+ },
+ );
});
- it('requests with default filters', async () => {
- await waitForPromises();
+ it('removes popstate event listener when destroyed', () => {
+ const spy = jest.spyOn(window, 'removeEventListener');
- expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(1);
- expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
- source: null,
- fullPath: projectPath,
- branch: defaultBranch,
- fromTime: new Date('2022-02-08'),
- toTime: new Date('2022-02-15'),
- });
- });
+ createComponent();
+ wrapper.destroy();
- it('when an option is selected, requests with new filters', async () => {
- await waitForPromises();
-
- findPipelinesDashboardClickhouseFilters().vm.$emit('input', {
- source: SOURCE_PUSH,
- dateRange: DATE_RANGE_30_DAYS,
- branch: 'feature-branch',
- });
-
- await waitForPromises();
-
- expect(getPipelineAnalyticsHandler).toHaveBeenCalledTimes(2);
- expect(getPipelineAnalyticsHandler).toHaveBeenLastCalledWith({
- source: SOURCE_PUSH,
- fullPath: projectPath,
- branch: 'feature-branch',
- fromTime: new Date('2022-01-16'),
- toTime: new Date('2022-02-15'),
- });
+ expect(spy).toHaveBeenCalledWith('popstate', wrapper.vm.updateParamsFromQuery);
});
});
diff --git a/spec/frontend/projects/pipelines/charts/utils_spec.js b/spec/frontend/projects/pipelines/charts/utils_spec.js
new file mode 100644
index 00000000000..74e1699061f
--- /dev/null
+++ b/spec/frontend/projects/pipelines/charts/utils_spec.js
@@ -0,0 +1,41 @@
+import { updateQueryHistory, paramsFromQuery } from '~/projects/pipelines/charts/url_utils';
+import { updateHistory } from '~/lib/utils/url_utility';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ updateHistory: jest.fn(),
+}));
+
+const defaults = {
+ source: null,
+ branch: 'main',
+ dateRange: '7d',
+};
+
+describe('dashboard utils', () => {
+ const examples = [
+ { input: {}, query: '' },
+ { input: defaults, query: '' },
+ { input: { source: 'PUSH' }, query: '?source=PUSH' },
+ { input: { branch: 'feature-branch' }, query: '?branch=feature-branch' },
+ { input: { dateRange: '180d' }, query: '?time=180d' },
+ {
+ input: { dateRange: '180d', branch: 'feature-branch', source: 'PUSH' },
+ query: '?branch=feature-branch&source=PUSH&time=180d',
+ },
+ ];
+
+ describe('updateQueryHistory', () => {
+ it.each(examples)('updates history to "http://test.host/$query"', ({ input, query }) => {
+ updateQueryHistory(input, defaults);
+
+ expect(updateHistory).toHaveBeenLastCalledWith({ url: `http://test.host/${query}` });
+ });
+ });
+
+ describe('paramsFromQuery', () => {
+ it.each(examples)('parses query "$query" into params', ({ query, input }) => {
+ expect(paramsFromQuery(query, defaults)).toEqual({ ...defaults, ...input });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
index 440984993a5..9c87672cd29 100644
--- a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
+++ b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
@@ -3,6 +3,14 @@ import { mount, shallowMount } from '@vue/test-utils';
import ColorPicker from '~/vue_shared/components/color_picker/color_picker.vue';
+const BORDER_COLOR_ERROR_MOCK = 'red';
+const BORDER_COLOR_DEFAULT_MOCK = 'gray';
+
+jest.mock('~/vue_shared/components/color_picker/constants.js', () => ({
+ BORDER_COLOR_ERROR: BORDER_COLOR_ERROR_MOCK,
+ BORDER_COLOR_DEFAULT: BORDER_COLOR_DEFAULT_MOCK,
+}));
+
jest.mock('lodash/uniqueId', () => (prefix) => (prefix ? `${prefix}1` : 1));
describe('ColorPicker', () => {
@@ -68,10 +76,11 @@ describe('ColorPicker', () => {
it('by default has no values', () => {
createComponent();
- expect(colorPreview().attributes('style')).toBe(undefined);
+ expect(colorPreview().attributes('style')).toBe(
+ `border-color: ${BORDER_COLOR_DEFAULT_MOCK};`,
+ );
expect(colorPicker().props('value')).toBe('');
expect(colorTextInput().props('value')).toBe('');
- expect(colorPreview().attributes('class')).toContain('gl-shadow-inner-1-gray-400');
});
it('has a color set on initialization', () => {
@@ -92,7 +101,6 @@ describe('ColorPicker', () => {
await colorTextInput().setValue(` ${setColor} `);
expect(wrapper.emitted().input[0]).toStrictEqual([setColor]);
- expect(colorPreview().attributes('class')).toContain('gl-shadow-inner-1-gray-400');
expect(colorTextInput().attributes('class')).not.toContain('is-invalid');
});
@@ -100,8 +108,8 @@ describe('ColorPicker', () => {
createComponent(mount, { invalidFeedback: invalidText, state: false });
expect(invalidFeedback().text()).toBe(invalidText);
- expect(colorPreview().attributes('class')).toContain('gl-shadow-inner-1-red-500');
expect(colorTextInput().attributes('class')).toContain('is-invalid');
+ expect(colorPreview().attributes('style')).toBe(`border-color: ${BORDER_COLOR_ERROR_MOCK};`);
});
});
diff --git a/spec/helpers/vite_helper_spec.rb b/spec/helpers/vite_helper_spec.rb
index 0543b06735a..89e529969f1 100644
--- a/spec/helpers/vite_helper_spec.rb
+++ b/spec/helpers/vite_helper_spec.rb
@@ -63,20 +63,6 @@ RSpec.describe ViteHelper, feature_category: :tooling do
expect(link_tag[:rel]).to eq('stylesheet')
expect(link_tag[:href]).to eq('/vite-dev/stylesheets/styles.application.scss.css')
end
-
- context 'when asset_host is set' do
- before do
- allow(helper).to receive_message_chain(:config, :asset_host).and_return('http://localhost')
-
- allow(ViteRuby.config).to receive(:host).and_return('localhost')
- allow(ViteRuby.config).to receive(:port).and_return(3808)
- end
-
- it 'replaces the asset_host with the configured Vite host' do
- expect(link_tag[:rel]).to eq('stylesheet')
- expect(link_tag[:href]).to eq('http://localhost:3808/vite-dev/stylesheets/styles.application.scss.css')
- end
- end
end
end
@@ -107,4 +93,36 @@ RSpec.describe ViteHelper, feature_category: :tooling do
end
end
end
+
+ describe '#vite_origin' do
+ before do
+ allow(ViteRuby).to receive_message_chain(:config, :origin).and_return('origin')
+ end
+
+ it { expect(helper.vite_origin).to eq('origin') }
+ end
+
+ describe '#vite_hmr_ws_origin' do
+ before do
+ allow(ViteRuby).to receive_message_chain(:config, :host_with_port).and_return('host')
+ allow(ViteRuby).to receive_message_chain(:config, :https).and_return(https)
+ end
+
+ context 'with https' do
+ let(:https) { true }
+
+ it 'returns wss origin' do
+ expect(helper.vite_hmr_ws_origin).to eq('wss://host')
+ end
+ end
+
+ context 'without https' do
+ let(:https) { false }
+
+ it 'returns ws origin' do
+ expect(helper.vite_hmr_ws_origin).to eq('ws://host')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_missing_namespace_id_on_notes_spec.rb b/spec/lib/gitlab/background_migration/backfill_missing_namespace_id_on_notes_spec.rb
deleted file mode 100644
index 523656eaad9..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_missing_namespace_id_on_notes_spec.rb
+++ /dev/null
@@ -1,223 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillMissingNamespaceIdOnNotes,
- :migration_with_transaction,
- feature_category: :code_review_workflow do
- let(:namespaces_table) { table(:namespaces) }
- let(:notes_table) { table(:notes) }
- let(:projects_table) { table(:projects) }
- let(:snippets_table) { table(:snippets) }
- let(:users_table) { table(:users) }
- let(:epics_table) { table(:epics) }
- let(:issues_table) { table(:issues) }
- let(:work_item_types_table) { table(:work_item_types) }
- let(:organizations_table) { table(:organizations) }
-
- let!(:organization) { organizations_table.create!(name: 'organization', path: 'organization') }
-
- let(:namespace_1) do
- namespaces_table.create!(
- name: 'namespace',
- path: 'namespace-path-1',
- organization_id: organization.id
- )
- end
-
- let(:project_namespace_2) do
- namespaces_table.create!(
- name: 'namespace',
- path: 'namespace-path-2',
- type: 'Project',
- organization_id: organization.id
- )
- end
-
- let!(:project_1) do
- projects_table
- .create!(
- name: 'project1',
- path: 'path1',
- namespace_id: namespace_1.id,
- project_namespace_id: project_namespace_2.id,
- visibility_level: 0,
- organization_id: organization.id
- )
- end
-
- let!(:user_1) { users_table.create!(name: 'bob', email: 'bob@example.com', projects_limit: 1) }
-
- before do
- # This test shares the db connection to establish it's fixtures, resulting in
- # incorrect connection usage, so we're skipping it.
- # Consult https://gitlab.com/gitlab-org/gitlab/-/merge_requests/180764 for more info.
- skip_if_multiple_databases_are_setup(:sec)
- end
-
- context "when namespace_id is derived from note.project_id" do
- let(:alert_management_alert_note) do
- notes_table.create!(project_id: project_1.id, noteable_type: "AlertManagement::Alert")
- end
-
- let(:commit_note) { notes_table.create!(project_id: project_1.id, noteable_type: "Commit") }
- let(:merge_request_note) { notes_table.create!(project_id: project_1.id, noteable_type: "MergeRequest") }
- let(:vulnerability_note) { notes_table.create!(project_id: project_1.id, noteable_type: "Vulnerability") }
- let(:design_note) { notes_table.create!(project_id: project_1.id, noteable_type: "Design") }
- let(:work_item_note) { notes_table.create!(project_id: project_1.id, noteable_type: "WorkItem") }
- let(:issue_note) { notes_table.create!(project_id: project_1.id, noteable_type: "Issue") }
-
- it "updates the namespace_id" do
- [
- alert_management_alert_note,
- commit_note,
- merge_request_note,
- vulnerability_note,
- design_note,
- work_item_note,
- issue_note
- ].each do |test_note|
- expect(test_note.project_id).not_to be_nil
-
- test_note.update_columns(namespace_id: nil)
- test_note.reload
-
- expect(test_note.namespace_id).to be_nil
-
- described_class.new(
- start_id: test_note.id,
- end_id: test_note.id,
- batch_table: :notes,
- batch_column: :id,
- sub_batch_size: 1,
- pause_ms: 0,
- connection: ActiveRecord::Base.connection
- ).perform
-
- test_note.reload
-
- expect(test_note.namespace_id).not_to be_nil
- expect(test_note.namespace_id).to eq(Project.find(test_note.project_id).namespace_id)
- end
- end
- end
-
- context "when namespace_id is derived from noteable.author.namespace_id" do
- let!(:snippet) do
- snippets_table.create!(
- author_id: user_1.id,
- project_id: project_1.id
- )
- end
-
- let(:personal_snippet_note) do
- notes_table.create!(author_id: user_1.id, noteable_type: "Snippet", noteable_id: snippet.id)
- end
-
- let(:project_snippet_note) do
- notes_table.create!(author_id: user_1.id, noteable_type: "Snippet", noteable_id: snippet.id)
- end
-
- let!(:user_namespace) do
- namespaces_table.create!(
- name: 'namespace',
- path: 'user-namespace-path',
- type: 'User',
- owner_id: user_1.id,
- organization_id: organization.id
- )
- end
-
- it "updates the namespace_id" do
- [project_snippet_note, personal_snippet_note].each do |test_note|
- test_note.update_columns(namespace_id: nil)
- test_note.reload
-
- expect(test_note.namespace_id).to be_nil
-
- described_class.new(
- start_id: test_note.id,
- end_id: test_note.id,
- batch_table: :notes,
- batch_column: :id,
- sub_batch_size: 1,
- pause_ms: 0,
- connection: ActiveRecord::Base.connection
- ).perform
-
- test_note.reload
-
- expect(test_note.namespace_id).not_to be_nil
- expect(test_note.namespace_id).to eq(user_namespace.id)
- end
- end
- end
-
- context "when namespace_id is derived from noteable.id" do
- let!(:group_namespace) do
- namespaces_table.create!(
- name: 'namespace',
- path: 'group-namespace-path',
- type: 'Group',
- owner_id: user_1.id,
- organization_id: organization.id
- )
- end
-
- let!(:work_items_type) do
- work_item_types_table.find_by(name: 'Issue')
- end
-
- let!(:issue) do
- issues_table.create!(
- title: "Example Epic",
- author_id: user_1.id,
- namespace_id: group_namespace.id,
- work_item_type_id: work_items_type.id
- )
- end
-
- let!(:epic) do
- epics_table.create!(
- title: "Example Epic",
- group_id: group_namespace.id,
- author_id: user_1.id,
- iid: Random.random_number(4000),
- title_html: "",
- issue_id: issue.id
- )
- end
-
- let(:epic_note) do
- notes_table.create!(
- namespace_id: group_namespace.id,
- noteable_type: "Epic",
- noteable_id: epic.id
- )
- end
-
- it "updates the namespace_id" do
- [epic_note].each do |test_note|
- test_note.update_columns(namespace_id: nil)
- test_note.reload
-
- expect(test_note.namespace_id).to be_nil
-
- described_class.new(
- start_id: test_note.id,
- end_id: test_note.id,
- batch_table: :notes,
- batch_column: :id,
- sub_batch_size: 1,
- pause_ms: 0,
- connection: ActiveRecord::Base.connection
- ).perform
-
- test_note.reload
-
- expect(test_note.namespace_id).not_to be_nil
- expect(test_note.namespace_id).to eq(group_namespace.id)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/database/partitioning/replace_table_spec.rb b/spec/lib/gitlab/database/partitioning/replace_table_spec.rb
index c53fb637bd4..d4e8344c288 100644
--- a/spec/lib/gitlab/database/partitioning/replace_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/replace_table_spec.rb
@@ -138,6 +138,27 @@ RSpec.describe Gitlab::Database::Partitioning::ReplaceTable, '#perform', feature
end
end
+ context 'when the source table is owned by a user with non-alphanumeric characters' do
+ let(:special_owner) { 'random-table-ownér$#%' }
+ let(:replace_table_instance) do
+ described_class.new(connection, original_table, replacement_table, archived_table, 'id')
+ end
+
+ it 'fails when owner name is not quoted' do
+ unquoted_sql = "ALTER TABLE #{connection.quote_table_name(original_table)} OWNER TO #{special_owner}"
+
+ expect do
+ connection.execute(unquoted_sql)
+ end.to raise_error(ActiveRecord::StatementInvalid, /syntax error/)
+ end
+
+ it 'properly quotes both table name and owner name' do
+ sql = replace_table_instance.send(:set_table_owner_statement, original_table, special_owner)
+
+ expect(sql).to eq("ALTER TABLE \"#{original_table}\" OWNER TO \"#{special_owner}\"")
+ end
+ end
+
def partitions_for_parent_table(table)
Gitlab::Database::PostgresPartition.for_parent_table(table)
end
diff --git a/spec/lib/vite_gdk_spec.rb b/spec/lib/vite_gdk_spec.rb
index 68bbec613ad..c0582d3cec3 100644
--- a/spec/lib/vite_gdk_spec.rb
+++ b/spec/lib/vite_gdk_spec.rb
@@ -24,12 +24,10 @@ RSpec.describe ViteGdk, feature_category: :tooling do
end.and_return(true)
expect(YAML).to receive(:safe_load_file) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
- end.and_return('enabled' => true, 'port' => 3038, 'host' => 'gdk.test')
- expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', port: 3038)
+ end.and_return('enabled' => true, 'port' => 3038, 'host' => '127.0.0.1', 'public_host' => 'gdk.test')
+ expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', https: false, port: 3038)
expect(ViteRuby.env).to receive(:[]=).with('VITE_ENABLED', 'true')
expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'gdk.test')
- expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HTTP_URL', 'http://gdk.test:3038')
- expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_WS_URL', 'ws://gdk.test:3038')
described_class.load_gdk_vite_config
end
@@ -43,43 +41,50 @@ RSpec.describe ViteGdk, feature_category: :tooling do
}
end
- it 'configures ViteRuby with HMR settings' do
+ it 'configures ViteRuby to use the same host for HMR' do
expect(File).to receive(:exist?) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
end.and_return(true)
expect(YAML).to receive(:safe_load_file) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
- end.and_return('enabled' => true, 'port' => 3038, 'host' => 'gdk.test', 'hmr' => hmr_config)
- expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', port: 3038)
+ end.and_return(
+ 'enabled' => true,
+ 'port' => 3038,
+ 'host' => '127.0.0.1',
+ 'public_host' => 'gdk.test',
+ 'hmr' => hmr_config)
+ expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', https: false, port: 3038)
expect(ViteRuby.env).to receive(:[]=).with('VITE_ENABLED', 'true')
- expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'hmr.gdk.test')
- expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HTTP_URL', 'https://hmr.gdk.test:9999')
- expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_WS_URL', 'wss://hmr.gdk.test:9999')
+ expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'gdk.test')
described_class.load_gdk_vite_config
end
end
- context 'when HMR config has no port' do
- let(:hmr_config) do
+ context 'when HTTPS config is present' do
+ let(:https_config) do
{
- 'host' => 'hmr.gdk.test',
- 'protocol' => 'wss'
+ 'enabled' => true,
+ 'key' => 'key',
+ 'certificate' => 'certificate'
}
end
- it 'configures ViteRuby with default port' do
+ it 'enables HTTPS' do
expect(File).to receive(:exist?) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
end.and_return(true)
expect(YAML).to receive(:safe_load_file) do |file_path|
expect(file_path).to end_with(VITE_GDK_CONFIG_FILEPATH)
- end.and_return('enabled' => true, 'port' => 3038, 'host' => 'gdk.test', 'hmr' => hmr_config)
- expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', port: 3038)
+ end.and_return(
+ 'enabled' => true,
+ 'port' => 3038,
+ 'host' => '127.0.0.1',
+ 'public_host' => 'gdk.test',
+ 'https' => https_config)
+ expect(ViteRuby).to receive(:configure).with(host: 'gdk.test', https: true, port: 3038)
expect(ViteRuby.env).to receive(:[]=).with('VITE_ENABLED', 'true')
- expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'hmr.gdk.test')
- expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HTTP_URL', 'https://hmr.gdk.test:3038')
- expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_WS_URL', 'wss://hmr.gdk.test:3038')
+ expect(ViteRuby.env).to receive(:[]=).with('VITE_HMR_HOST', 'gdk.test')
described_class.load_gdk_vite_config
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 2b888c07121..4167daf2efd 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -359,16 +359,6 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it 'only returns public issuables' do
expect(described_class.without_hidden).not_to include(hidden_merge_request)
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(hide_merge_requests_from_banned_users: false)
- end
-
- it 'returns public and hidden issuables' do
- expect(described_class.without_hidden).to include(hidden_merge_request)
- end
- end
end
describe '.merged_without_state_event_source' do
@@ -6578,14 +6568,6 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let_it_be(:author) { create(:user, :banned) }
it { is_expected.to eq(true) }
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(hide_merge_requests_from_banned_users: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
end
diff --git a/spec/policies/merge_request_policy_spec.rb b/spec/policies/merge_request_policy_spec.rb
index 7f4aad48ae4..642842a0aca 100644
--- a/spec/policies/merge_request_policy_spec.rb
+++ b/spec/policies/merge_request_policy_spec.rb
@@ -632,19 +632,5 @@ RSpec.describe MergeRequestPolicy, feature_category: :code_review_workflow do
it 'allows admin to read the merge_request', :enable_admin_mode do
expect(permissions(admin, hidden_merge_request)).to be_allowed(:read_merge_request)
end
-
- context 'when the `hide_merge_requests_from_banned_users` feature flag is disabled' do
- before do
- stub_feature_flags(hide_merge_requests_from_banned_users: false)
- end
-
- it 'allows non-admin users to read the merge_request' do
- expect(permissions(user, hidden_merge_request)).to be_allowed(:read_merge_request)
- end
-
- it 'allows admin users to read the merge_request', :enable_admin_mode do
- expect(permissions(admin, hidden_merge_request)).to be_allowed(:read_merge_request)
- end
- end
end
end
diff --git a/spec/requests/application_controller_spec.rb b/spec/requests/application_controller_spec.rb
index b85817e03c0..b85d1699146 100644
--- a/spec/requests/application_controller_spec.rb
+++ b/spec/requests/application_controller_spec.rb
@@ -282,4 +282,39 @@ RSpec.describe ApplicationController, type: :request, feature_category: :shared
end
end
end
+
+ context 'when configuring vite' do
+ let(:vite_hmr_ws_origin) { 'ws://gitlab.example.com:3808' }
+ let(:vite_origin) { 'http://gitlab.example.com:3808' }
+
+ before do
+ # rubocop:disable RSpec/AnyInstanceOf -- Doesn't work with allow_next_instance_of
+ allow_any_instance_of(ViteHelper)
+ .to receive_messages(
+ vite_enabled?: vite_enabled,
+ vite_hmr_ws_origin: vite_hmr_ws_origin,
+ vite_origin: vite_origin,
+ universal_path_to_stylesheet: '')
+ # rubocop:enable RSpec/AnyInstanceOf
+ end
+
+ context 'when vite enabled during development' do
+ let(:vite_enabled) { true }
+
+ it 'adds vite csp' do
+ get root_path
+ expect(response.headers['Content-Security-Policy']).to include("#{vite_hmr_ws_origin}/vite-dev/")
+ expect(response.headers['Content-Security-Policy']).to include("#{vite_origin}/vite-dev/")
+ end
+ end
+
+ context 'when vite is disabled' do
+ let(:vite_enabled) { false }
+
+ it "doesn't add vite csp" do
+ get root_path
+ expect(response.headers['Content-Security-Policy']).not_to include('/vite-dev/')
+ end
+ end
+ end
end
diff --git a/spec/requests/projects/merge_requests/diffs_stream_spec.rb b/spec/requests/projects/merge_requests/diffs_stream_spec.rb
index ad57117bc66..04c14ffcc99 100644
--- a/spec/requests/projects/merge_requests/diffs_stream_spec.rb
+++ b/spec/requests/projects/merge_requests/diffs_stream_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe 'Merge Requests Diffs stream', feature_category: :code_review_wor
context 'when accessed' do
it 'passes hash of options to #diffs_for_streaming' do
expect_next_instance_of(::Projects::MergeRequests::DiffsStreamController) do |controller|
- context = {}
+ context = controller.view_context
allow(controller).to receive(:view_context).and_return(context)
expect(controller).to receive(:stream_diff_files)
.with(diff_options_hash, context)
diff --git a/spec/support/helpers/content_security_policy_helpers.rb b/spec/support/helpers/content_security_policy_helpers.rb
index 8e576629811..a1883375e76 100644
--- a/spec/support/helpers/content_security_policy_helpers.rb
+++ b/spec/support/helpers/content_security_policy_helpers.rb
@@ -5,7 +5,7 @@ module ContentSecurityPolicyHelpers
# 1. call that's being tested
# 2. call in ApplicationController
def setup_csp_for_controller(
- controller_class, csp = ActionDispatch::ContentSecurityPolicy.new, times: 2,
+ controller_class, csp = ActionDispatch::ContentSecurityPolicy.new, times: 3,
any_time: false)
expect_next_instance_of(controller_class) do |controller|
if any_time
diff --git a/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb b/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
index 48c2490a9bb..1a01df0ce3a 100644
--- a/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/base_action_controller_shared_examples.rb
@@ -45,45 +45,6 @@ RSpec.shared_examples 'Base action controller' do
it_behaves_like 'snowplow is not in the CSP'
end
end
-
- context 'when configuring vite' do
- let(:vite_hmr_websocket_url) { "ws://gitlab.example.com:3808" }
- let(:vite_hmr_http_url) { "http://gitlab.example.com:3808" }
- let(:vite_gitlab_url) { Gitlab::Utils.append_path(Gitlab.config.gitlab.url, 'vite-dev/') }
-
- context 'when vite enabled during development',
- skip: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424334' do
- before do
- stub_rails_env('development')
- allow(ViteHelper).to receive(:vite_enabled?).and_return(true)
- allow(BaseActionController.helpers).to receive(:vite_enabled?).and_return(true)
- allow(BaseActionController.helpers).to receive(:vite_hmr_websocket_url).and_return(vite_hmr_websocket_url)
- allow(BaseActionController.helpers).to receive(:vite_hmr_http_url).and_return(vite_hmr_http_url)
- end
-
- it 'adds vite csp' do
- request
-
- expect(response.headers['Content-Security-Policy']).to include("#{vite_hmr_websocket_url}/vite-dev/")
- expect(response.headers['Content-Security-Policy']).to include("#{vite_hmr_http_url}/vite-dev/")
- expect(response.headers['Content-Security-Policy']).to include(vite_gitlab_url)
- end
- end
-
- context 'when vite disabled' do
- before do
- allow(BaseActionController.helpers).to receive(:vite_enabled?).and_return(false)
- end
-
- it "doesn't add vite csp" do
- request
-
- expect(response.headers['Content-Security-Policy']).not_to include(vite_hmr_websocket_url)
- expect(response.headers['Content-Security-Policy']).not_to include(vite_hmr_http_url)
- expect(response.headers['Content-Security-Policy']).not_to include(vite_gitlab_url)
- end
- end
- end
end
end
end
diff --git a/vite.config.js b/vite.config.js
index d508be3ab1c..5079627ab91 100644
--- a/vite.config.js
+++ b/vite.config.js
@@ -124,9 +124,7 @@ export default defineConfig({
'process.env.GITLAB_WEB_IDE_PUBLIC_PATH': JSON.stringify(GITLAB_WEB_IDE_PUBLIC_PATH),
'window.IS_VITE': JSON.stringify(true),
'window.VUE_DEVTOOLS_CONFIG.openInEditorHost': JSON.stringify(
- viteGDKConfig.hmr
- ? `${process.env.VITE_HMR_HTTP_URL}/vite-dev/`
- : `http://${viteGDKConfig.host}:${viteGDKConfig.port}/vite-dev/`,
+ `${viteGDKConfig.https?.enabled ? 'https' : 'http'}://${viteGDKConfig.public_host}:${viteGDKConfig.port}/vite-dev/`,
),
'process.env.PDF_JS_WORKER_PUBLIC_PATH': JSON.stringify(PDF_JS_WORKER_PUBLIC_PATH),
'process.env.PDF_JS_CMAPS_PUBLIC_PATH': JSON.stringify(PDF_JS_CMAPS_PUBLIC_PATH),
@@ -136,8 +134,12 @@ export default defineConfig({
warmup: {
clientFiles: ['javascripts/entrypoints/main.js', 'javascripts/entrypoints/super_sidebar.js'],
},
- hmr: viteGDKConfig.hmr,
- https: false,
+ https: viteGDKConfig.https?.enabled
+ ? {
+ key: viteGDKConfig.https?.key,
+ cert: viteGDKConfig.https?.certificate,
+ }
+ : false,
watch:
viteGDKConfig.hmr === null
? null