(states) do
- where(
- reviewers_subquery
- .where(Arel::Table.new("#{to_ability_name}_reviewers")[:state].in(states))
- .exists
- )
+ scope :review_states, ->(states, ignored_reviewer = nil) do
+ reviewers = Arel::Table.new("#{to_ability_name}_reviewers")
+
+ scope = reviewers_subquery.where(reviewers[:state].in(states))
+ scope = scope.where(reviewers[:user_id].not_eq(ignored_reviewer.id)) if ignored_reviewer
+
+ where(scope.exists)
end
scope :not_only_reviewer, ->(user) do
@@ -539,16 +540,15 @@ class MergeRequest < ApplicationRecord
review_requested.where.not(subquery.exists)
end
- scope :no_review_states, ->(states) do
- where(
- reviewers_subquery.exists
- )
- .where(
- reviewers_subquery
- .where(Arel::Table.new("#{to_ability_name}_reviewers")[:state].in(states))
- .exists
- .not
- )
+ scope :no_review_states, ->(states, ignored_reviewer = nil) do
+ reviewers = Arel::Table.new("#{to_ability_name}_reviewers")
+
+ scope = reviewers_subquery
+ scope = scope.where(reviewers[:user_id].not_eq(ignored_reviewer.id)) if ignored_reviewer
+
+ forbidden = scope.clone.where(reviewers[:state].in(states))
+
+ where(scope.exists).where(forbidden.exists.not)
end
scope :assignee_or_reviewer, ->(user, assigned_review_states, reviewer_state) do
diff --git a/app/services/ci/pipeline_creation/find_pipeline_inputs_service.rb b/app/services/ci/pipeline_creation/find_pipeline_inputs_service.rb
index 1af749ee552..3c3b7a8312d 100644
--- a/app/services/ci/pipeline_creation/find_pipeline_inputs_service.rb
+++ b/app/services/ci/pipeline_creation/find_pipeline_inputs_service.rb
@@ -17,11 +17,11 @@ module Ci
def execute
unless current_user.can?(:download_code, project)
- return error_response('insufficient permissions to read inputs')
+ return error_response(s_('Pipelines|Insufficient permissions to read inputs'))
end
if !project.repository.branch_or_tag?(ref) || sha.blank?
- return error_response('ref can only be an existing branch or tag')
+ return error_response(s_('Pipelines|Can only run new pipelines for an existing branch or tag'))
end
# The project config may not exist if the project is using a policy.
@@ -36,14 +36,14 @@ module Ci
# We need to read the uninterpolated YAML of the included file.
yaml_content = ::Gitlab::Ci::Config::Yaml.load!(project_config.content)
yaml_result = yaml_result_of_internal_include(yaml_content)
- return error_response('invalid YAML config') unless yaml_result&.valid?
+ return error_response(s_('Pipelines|Invalid YAML syntax')) unless yaml_result&.valid?
spec_inputs = Ci::PipelineCreation::Inputs::SpecInputs.new(yaml_result.spec[:inputs])
return error_response(spec_inputs.errors.join(', ')) if spec_inputs.errors.any?
success_response(spec_inputs)
else
- error_response('inputs not supported for this CI config source')
+ error_response(s_('Pipelines|Inputs not supported for this CI config source'))
end
rescue ::Gitlab::Ci::Config::Yaml::LoadError => e
error_response("YAML load error: #{e.message}")
diff --git a/config/feature_flags/wip/group_remove_dormant_members.yml b/config/feature_flags/wip/group_remove_dormant_members.yml
deleted file mode 100644
index d9b252e2a82..00000000000
--- a/config/feature_flags/wip/group_remove_dormant_members.yml
+++ /dev/null
@@ -1,9 +0,0 @@
----
-name: group_remove_dormant_members
-feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/461339
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/153118
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/461567
-milestone: '17.1'
-group: group::utilization
-type: wip
-default_enabled: false
diff --git a/config/helpers/vite_plugin_cross_origin_worker.js b/config/helpers/vite_plugin_cross_origin_worker.js
new file mode 100644
index 00000000000..b10a47cc1c9
--- /dev/null
+++ b/config/helpers/vite_plugin_cross_origin_worker.js
@@ -0,0 +1,34 @@
+const replacer = (fullImport, importName, importPath) => {
+ const workerUrl = `${importName}Url`;
+ const blobName = `${importName}Blob`;
+ return `/* worker import was replaced to support cross-origin workers */
+import ${workerUrl} from '${importPath}&url';
+const ${blobName} = new Blob([\`import \${JSON.stringify(new URL(${workerUrl}, import.meta.url))}\`], { type: "application/javascript" });
+function ${importName}(options) {
+ const objURL = URL.createObjectURL(${blobName});
+ const worker = new Worker(objURL, { type: "module", name: options?.name });
+ worker.addEventListener("error", (e) => { URL.revokeObjectURL(objURL) });
+ return worker;
+}
+/* end of replaced code */`;
+};
+
+export const CrossOriginWorkerPlugin = () => {
+ let config;
+ return {
+ name: 'vite-worker-transform-plugin',
+ configResolved(resolvedConfig) {
+ config = resolvedConfig;
+ },
+ transform(code) {
+ if (config.command !== 'serve' || !code.includes('?worker')) {
+ return null;
+ }
+
+ return {
+ code: code.replace(/import\s+(\w+)\s+from\s+['"](.*?\?worker)['"];/g, replacer),
+ map: null,
+ };
+ },
+ };
+};
diff --git a/config/settings.rb b/config/settings.rb
index e884b2859ed..8d6d2e9da71 100644
--- a/config/settings.rb
+++ b/config/settings.rb
@@ -132,11 +132,6 @@ Settings = GitlabSettings.load(file, Rails.env) do
File.expand_path(path, Rails.root)
end
- # FIXME: Deprecated in favor of Gitlab::Encryption::KeyProvider
- def attr_encrypted_db_key_base_truncated
- db_key_base_keys_truncated.first
- end
-
# Don't use this in new code, use db_key_base_keys_32_bytes instead!
def db_key_base_keys_truncated
db_key_base_keys.map do |key| # rubocop:disable Rails/Pluck -- No Rails context
@@ -144,11 +139,6 @@ Settings = GitlabSettings.load(file, Rails.env) do
end
end
- # FIXME: Deprecated in favor of Gitlab::Encryption::KeyProvider
- def attr_encrypted_db_key_base_32
- db_key_base_keys_32_bytes.first
- end
-
# Ruby 2.4+ requires passing in the exact required length for OpenSSL keys
# (https://github.com/ruby/ruby/commit/ce635262f53b760284d56bb1027baebaaec175d1).
# Previous versions quietly truncated the input.
@@ -162,11 +152,6 @@ Settings = GitlabSettings.load(file, Rails.env) do
end
end
- # FIXME: Deprecated in favor of Gitlab::Encryption::KeyProvider
- def attr_encrypted_db_key_base
- db_key_base_keys.first
- end
-
# This should be used for :per_attribute_iv_and_salt mode. There is no
# need to truncate the key because the encryptor will use the salt to
# generate a hash of the password:
diff --git a/db/post_migrate/20250424050001_prepare_to_drop_index_p_ci_builds_trigger_request_id.rb b/db/post_migrate/20250424050001_prepare_to_drop_index_p_ci_builds_trigger_request_id.rb
new file mode 100644
index 00000000000..3aa26d816bd
--- /dev/null
+++ b/db/post_migrate/20250424050001_prepare_to_drop_index_p_ci_builds_trigger_request_id.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class PrepareToDropIndexPCiBuildsTriggerRequestId < Gitlab::Database::Migration[2.2]
+ milestone '18.0'
+ disable_ddl_transaction!
+
+ TABLE = :p_ci_builds
+ COLUMN = :trigger_request_id
+ INDEX_NAME = :tmp_p_ci_builds_trigger_request_id_idx
+
+ def up
+ prepare_async_index_removal(TABLE, COLUMN, name: INDEX_NAME)
+ end
+
+ def down
+ unprepare_async_index(TABLE, COLUMN, name: INDEX_NAME)
+ end
+end
diff --git a/db/schema_migrations/20250424050001 b/db/schema_migrations/20250424050001
new file mode 100644
index 00000000000..597d682f1f9
--- /dev/null
+++ b/db/schema_migrations/20250424050001
@@ -0,0 +1 @@
+7be20294f7af1c1deb5e50b196ac9cfea96f7783c3bc9b4f90ea3a59f5477fa8
\ No newline at end of file
diff --git a/doc/api/graphql/reference/_index.md b/doc/api/graphql/reference/_index.md
index 373c383e501..f77055dab96 100644
--- a/doc/api/graphql/reference/_index.md
+++ b/doc/api/graphql/reference/_index.md
@@ -80,24 +80,6 @@ four standard [pagination arguments](#pagination-arguments):
| ---- | ---- | ----------- |
| `searchTerm` | [`String`](#string) | Search term to find labels with. |
-### `Query.addOnPurchase`
-
-Retrieve the active add-on purchase. This query can be used in GitLab SaaS and self-managed environments.
-
-{{< details >}}
-**Deprecated** in GitLab 17.4.
-Use [addOnPurchases](#queryaddonpurchases) instead.
-{{< /details >}}
-
-Returns [`AddOnPurchase`](#addonpurchase).
-
-#### Arguments
-
-| Name | Type | Description |
-| ---- | ---- | ----------- |
-| `addOnType` | [`GitlabSubscriptionsAddOnType!`](#gitlabsubscriptionsaddontype) | Type of add-on for the add-on purchase. |
-| `namespaceId` | [`NamespaceID`](#namespaceid) | ID of namespace that the add-on was purchased for. |
-
### `Query.addOnPurchases`
Retrieve all active add-on purchases. This query can be used in GitLab.com and self-managed environments.
@@ -20495,6 +20477,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -20553,6 +20536,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -20673,6 +20657,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -21536,6 +21521,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -21594,6 +21580,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -21726,6 +21713,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -24383,6 +24371,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -24444,6 +24433,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -24501,6 +24491,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -24621,6 +24612,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -28357,6 +28349,7 @@ four standard [pagination arguments](#pagination-arguments):
| `deploymentId` | [`String`](#string) | ID of the deployment. |
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Return merge requests from archived projects. |
@@ -30833,6 +30826,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -30891,6 +30885,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -31011,6 +31006,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -31252,6 +31248,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -31310,6 +31307,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -31430,6 +31428,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -31722,6 +31721,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -31780,6 +31780,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -31900,6 +31901,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -32160,6 +32162,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -32218,6 +32221,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -32338,6 +32342,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -36069,6 +36074,7 @@ four standard [pagination arguments](#pagination-arguments):
| `deploymentId` | [`String`](#string) | ID of the deployment. |
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `labelName` | [`[String]`](#string) | Labels applied to the merge request. |
@@ -39606,6 +39612,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -39664,6 +39671,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -39784,6 +39792,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -48064,6 +48073,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -48122,6 +48132,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
@@ -48242,6 +48253,7 @@ four standard [pagination arguments](#pagination-arguments):
| `draft` | [`Boolean`](#boolean) | Limit result to draft merge requests. |
| `environmentName` | [`String`](#string) | Environment merge requests have been deployed to. |
| `groupId` | [`GroupID`](#groupid) | The global ID of the group the authored merge requests should be in. Merge requests in subgroups are included. |
+| `ignoredReviewerUsername` {{< icon name="warning-solid" >}} | [`String`](#string) | **Introduced** in GitLab 18.0. **Status**: Experiment. Username of the reviewer to ignore when searching by reviewer state. |
| `iids` | [`[String!]`](#string) | Array of IIDs of merge requests, for example `[1, 2]`. |
| `in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument.'. |
| `includeArchived` | [`Boolean`](#boolean) | Merge requests from archived projects. |
diff --git a/doc/development/namespaces.md b/doc/development/namespaces.md
index 2b90da86b14..31d1ea0f2d8 100644
--- a/doc/development/namespaces.md
+++ b/doc/development/namespaces.md
@@ -217,7 +217,7 @@ namespace_object.recursive_self_and_hierarchy
### Search using trie data structure
`Namespaces::Traversal::TrieNode` implements a trie data structure to efficiently search within
-`namespaces.traveral_ids` hierarchy for a set of Namespaces.
+`namespaces.traversal_ids` hierarchy for a set of Namespaces.
```ruby
traversal_ids = [[9970, 123], [9970, 456]] # Derived from (for example): Namespace.where(...).map(&:traversal_ids)
diff --git a/doc/development/pipelines/_index.md b/doc/development/pipelines/_index.md
index b6631595444..ba8f736794d 100644
--- a/doc/development/pipelines/_index.md
+++ b/doc/development/pipelines/_index.md
@@ -755,6 +755,7 @@ NOTE: With the addition of PG17, we are close to the limit of nightly jobs, with
| `maintenance` scheduled pipelines for the `master` branch (every even-numbered hour at XX:05) | 16 (default version) | 3.2 (default version) |
| `maintenance` scheduled pipelines for the `ruby-next` branch (every odd-numbered hour at XX:10) | 16 (default version) | 3.3 |
| `nightly` scheduled pipelines for the `master` branch | 16 (default version), 14, 15 and 17 | 3.2 (default version) |
+| `weekly` scheduled pipelines for the `master` branch | 16 (default version) | 3.2 (default version) |
For the next Ruby versions we're testing against with, we run
maintenance scheduled pipelines every 2 hours on the `ruby-next` branch.
@@ -811,6 +812,7 @@ test suites use PostgreSQL 16 because there is no dependency between the databas
|-------------------------------------------------------------------------------------------------|-----------------------|----------------------|----------------------|
| Merge requests with label `~group::global search` or `~pipeline:run-search-tests` | 8.X (production) | | 16 (default version) |
| `nightly` scheduled pipelines for the `master` branch | 7.X, 8.X (production) | 1.X, 2.X | 16 (default version) |
+| `weekly` scheduled pipelines for the `master` branch | | latest | 16 (default version) |
## Monitoring
diff --git a/doc/user/gitlab_duo/tutorials/fix_code_python_shop.md b/doc/user/gitlab_duo/tutorials/fix_code_python_shop.md
index bf677014f62..c12543b7b9f 100644
--- a/doc/user/gitlab_duo/tutorials/fix_code_python_shop.md
+++ b/doc/user/gitlab_duo/tutorials/fix_code_python_shop.md
@@ -1411,4 +1411,7 @@ exists to create this tutorial.
-
[GitLab Duo Code Suggestions](https://youtu.be/ds7SG1wgcVM?si=MfbzPIDpikGhoPh7)
+-
+ [Application modernization with GitLab Duo (C++ to Java)](https://youtu.be/FjoAmt5eeXA?si=SLv9Mv8eSUAVwW5Z)
+
diff --git a/doc/user/gitlab_duo_chat/examples.md b/doc/user/gitlab_duo_chat/examples.md
index 7b96635d174..a6c8228c9f8 100644
--- a/doc/user/gitlab_duo_chat/examples.md
+++ b/doc/user/gitlab_duo_chat/examples.md
@@ -249,7 +249,11 @@ You can also add additional instructions to be considered. For example:
- `/explain how concurrency works in this context` (Go)
- `/explain how the request reaches the client` (REST API, database)
-For more information, see [Use GitLab Duo Chat in VS Code](_index.md#use-gitlab-duo-chat-in-vs-code).
+For more information, see:
+
+- [Use GitLab Duo Chat in VS Code](_index.md#use-gitlab-duo-chat-in-vs-code).
+- [Application modernization with GitLab Duo (C++ to Java)](https://youtu.be/FjoAmt5eeXA?si=SLv9Mv8eSUAVwW5Z).
+
In the GitLab UI, you can also explain code in:
@@ -420,6 +424,9 @@ You can include additional instructions to be considered. For example:
- Focus on performance, for example `/refactor improving performance`.
- Focus on potential vulnerabilities, for example `/refactor avoiding memory leaks and exploits`.
+For more information, see [Application modernization with GitLab Duo (C++ to Java)](https://youtu.be/FjoAmt5eeXA?si=SLv9Mv8eSUAVwW5Z).
+
+
## Fix code in the IDE
{{< details >}}
diff --git a/doc/user/group/moderate_users.md b/doc/user/group/moderate_users.md
index 9d3d04de490..eef096396b7 100644
--- a/doc/user/group/moderate_users.md
+++ b/doc/user/group/moderate_users.md
@@ -85,14 +85,6 @@ To unban a user:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/461339) in GitLab 17.1 [with a flag](../../administration/feature_flags.md) named `group_remove_dormant_members`. Disabled by default.
> [Released](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/178851) as a [beta](../../policy/development_stages_support.md#beta) feature in GitLab 17.9.
-{{< alert type="flag" >}}
-
-The availability of this feature is controlled by a feature flag.
-For more information, see the history.
-This feature is available for testing, but not ready for production use.
-
-{{< /alert >}}
-
Prerequisites:
- You must have the Owner role for the group.
diff --git a/lib/ci/pipeline_creation/inputs/spec_inputs.rb b/lib/ci/pipeline_creation/inputs/spec_inputs.rb
index d87b0eceaf9..9399eaad5c2 100644
--- a/lib/ci/pipeline_creation/inputs/spec_inputs.rb
+++ b/lib/ci/pipeline_creation/inputs/spec_inputs.rb
@@ -20,6 +20,8 @@ module Ci
@inputs = []
@errors = []
+ return unless valid_specs?(specs)
+
build_inputs!(specs.to_h)
end
@@ -62,6 +64,17 @@ module Ci
@inputs << input_type.new(name: input_name, spec: spec)
end
end
+
+ def valid_specs?(specs)
+ return true if specs.respond_to?(:to_h)
+
+ @errors.push(
+ format(s_("Pipelines|Invalid input specification: expected a hash-like object, got %{class_name}"),
+ class_name: specs.class.name)
+ )
+
+ false
+ end
end
end
end
diff --git a/lib/gitlab/ci/templates/Jobs/Secret-Detection.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Secret-Detection.latest.gitlab-ci.yml
index 23eb3f56dd7..e51fb2cf0dd 100644
--- a/lib/gitlab/ci/templates/Jobs/Secret-Detection.latest.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Jobs/Secret-Detection.latest.gitlab-ci.yml
@@ -31,6 +31,7 @@ variables:
access: 'developer'
reports:
secret_detection: gl-secret-detection-report.json
+ cache: []
secret_detection:
extends: .secret-analyzer
diff --git a/lib/gitlab/database/reindexing.rb b/lib/gitlab/database/reindexing.rb
index 388307bf09a..6f4b61821f6 100644
--- a/lib/gitlab/database/reindexing.rb
+++ b/lib/gitlab/database/reindexing.rb
@@ -40,6 +40,9 @@ module Gitlab
automatic_reindexing
end
+
+ # Temporary in order to truncate this table during low traffic
+ TruncateTaggings.new.execute
rescue StandardError => e
Gitlab::AppLogger.error(e)
raise
diff --git a/lib/gitlab/database/truncate_taggings.rb b/lib/gitlab/database/truncate_taggings.rb
new file mode 100644
index 00000000000..356b1aea6ed
--- /dev/null
+++ b/lib/gitlab/database/truncate_taggings.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Database
+ class TruncateTaggings
+ include AsyncDdlExclusiveLeaseGuard
+
+ def execute
+ return unless Gitlab.com_except_jh? # rubocop:disable Gitlab/AvoidGitlabInstanceChecks -- it's not a feature
+ return unless taggings_has_any_data?
+
+ try_obtain_lease do
+ connection.execute('TRUNCATE TABLE "taggings"')
+ end
+ end
+
+ def taggings_has_any_data?
+ !!connection.select_value('SELECT TRUE FROM "taggings" LIMIT 1')
+ end
+
+ def connection
+ ::Ci::ApplicationRecord.connection
+ end
+
+ def connection_db_config
+ ::Ci::ApplicationRecord.connection_db_config
+ end
+
+ def lease_timeout
+ 10.minutes
+ end
+ end
+ end
+end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 1732abfe58b..9b3f271ebca 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -44702,6 +44702,9 @@ msgstr ""
msgid "Pipelines|By revoking a trigger token you will break any processes making use of it. Are you sure?"
msgstr ""
+msgid "Pipelines|Can only run new pipelines for an existing branch or tag"
+msgstr ""
+
msgid "Pipelines|Child pipeline (%{linkStart}parent%{linkEnd})"
msgstr ""
@@ -44777,6 +44780,18 @@ msgstr ""
msgid "Pipelines|Inputs"
msgstr ""
+msgid "Pipelines|Inputs not supported for this CI config source"
+msgstr ""
+
+msgid "Pipelines|Insufficient permissions to read inputs"
+msgstr ""
+
+msgid "Pipelines|Invalid YAML syntax"
+msgstr ""
+
+msgid "Pipelines|Invalid input specification: expected a hash-like object, got %{class_name}"
+msgstr ""
+
msgid "Pipelines|It is recommended the code is reviewed thoroughly before running this pipeline with the parent project's CI resource."
msgstr ""
@@ -44921,7 +44936,7 @@ msgstr ""
msgid "Pipelines|There was a problem fetching the pipeline iid."
msgstr ""
-msgid "Pipelines|There was a problem fetching the pipeline inputs."
+msgid "Pipelines|There was a problem fetching the pipeline inputs. Please try again."
msgstr ""
msgid "Pipelines|There was a problem fetching the pipeline stage jobs."
@@ -59514,6 +59529,9 @@ msgstr ""
msgid "SuperSonics|Cannot activate instance due to a connectivity issue"
msgstr ""
+msgid "SuperSonics|Could not activate subscription"
+msgstr ""
+
msgid "SuperSonics|Customers Portal"
msgstr ""
@@ -59556,6 +59574,9 @@ msgstr ""
msgid "SuperSonics|Start free trial"
msgstr ""
+msgid "SuperSonics|Subscription cannot be activated if %{silentModeDocsLinkStart}Silent Mode%{silentModeDocsLinkEnd} is enabled. Disable Silent Mode and try again."
+msgstr ""
+
msgid "SuperSonics|Subscription detail synchronization has started and will complete soon."
msgstr ""
diff --git a/scripts/extract_fingerprints b/scripts/extract_fingerprints
deleted file mode 100755
index 865596ec61e..00000000000
--- a/scripts/extract_fingerprints
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-# Script to extract SQL query fingerprints from auto-explain logs
-require 'json'
-require 'zlib'
-
-if ARGV.size < 2
- puts "Usage: #{$PROGRAM_NAME} "
- exit 1
-end
-
-input_file = ARGV[0]
-output_file = ARGV[1]
-
-unless File.exist?(input_file)
- puts "Error: Input file not found - #{input_file}"
- exit 1
-end
-
-fingerprints = Set.new
-
-begin
- # Handle both compressed and uncompressed files
- if input_file.end_with?('.gz')
- Zlib::GzipReader.open(input_file) do |gz|
- gz.each_line do |line|
- data = JSON.parse(line)
- fingerprints.add(data['fingerprint']) if data['fingerprint']
- rescue JSON::ParserError
- # empty
- end
- end
- else
- File.foreach(input_file) do |line|
- data = JSON.parse(line)
- fingerprints.add(data['fingerprint']) if data['fingerprint']
- rescue JSON::ParserError
- # empty
- end
- end
-
- File.open(output_file, 'w') { |f| fingerprints.each { |fp| f.puts(fp) } }
-rescue StandardError => e
- puts "Error: #{e.message}"
- exit 1
-end
diff --git a/scripts/gitlab_component_helpers.sh b/scripts/gitlab_component_helpers.sh
index 80641cf49f3..3eeba79a015 100644
--- a/scripts/gitlab_component_helpers.sh
+++ b/scripts/gitlab_component_helpers.sh
@@ -232,7 +232,7 @@ export FINGERPRINTS_PACKAGE_URL="${API_PACKAGES_BASE_URL}/auto-explain-logs/mast
function extract_and_upload_fingerprints() {
echo "Extracting SQL query fingerprints from ${RSPEC_AUTO_EXPLAIN_LOG_PATH}"
- ruby scripts/extract_fingerprints "${RSPEC_AUTO_EXPLAIN_LOG_PATH}" "${FINGERPRINTS_FILE}.new"
+ ruby scripts/sql_fingerprint_extractor.rb "${RSPEC_AUTO_EXPLAIN_LOG_PATH}" "${FINGERPRINTS_FILE}.new"
# Check if any new fingerprints were found
new_count=$(wc -l < "${FINGERPRINTS_FILE}.new")
diff --git a/scripts/merge_request_query_differ.rb b/scripts/merge_request_query_differ.rb
new file mode 100755
index 00000000000..42540214711
--- /dev/null
+++ b/scripts/merge_request_query_differ.rb
@@ -0,0 +1,298 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'json'
+require 'uri'
+require 'net/http'
+require 'zlib'
+require 'rubygems/package'
+require 'stringio'
+require 'logger'
+require_relative 'sql_fingerprint_extractor'
+
+# MergeRequestQueryDiffer compares auto_explain logs from an MR against master
+# to identify new query patterns introduced by the MR
+class MergeRequestQueryDiffer
+ PROJECT_ID = ENV['CI_PROJECT_ID'] || '278964'
+ PACKAGE_NAME = 'auto-explain-logs'
+ PACKAGE_FILE = 'query-fingerprints.tar.gz'
+ NEW_QUERIES_PATH = 'new_sql_queries.md'
+ CONSOLIDATED_FINGERPRINTS_URL = ENV['CONSOLIDATED_FINGERPRINTS_URL'] ||
+ "https://gitlab.com/api/v4/projects/#{PROJECT_ID}/packages/generic/#{PACKAGE_NAME}/master/#{PACKAGE_FILE}"
+
+ attr_reader :mr_auto_explain_path, :output_file, :logger, :sql_fingerprint_extractor, :report_generator
+
+ def initialize(mr_auto_explain_path, logger = nil)
+ @mr_auto_explain_path = mr_auto_explain_path
+ output_dir = File.dirname(mr_auto_explain_path)
+ @output_file = File.join(output_dir, NEW_QUERIES_PATH)
+ @logger = logger || Logger.new($stdout)
+ @sql_fingerprint_extractor = SQLFingerprintExtractor.new(@logger)
+ @report_generator = ReportGenerator.new(@logger)
+ end
+
+ def run
+ logger.info "MR Query Diff: Analyzing new queries in MR compared to master"
+
+ # Step 1: Extract query fingerprints from MR
+ mr_queries = sql_fingerprint_extractor.extract_queries_from_file(mr_auto_explain_path)
+ if mr_queries.empty?
+ logger.info "No queries found in MR file"
+ write_report(output_file, "# SQL Query Analysis\n\nNo queries found in this MR.")
+ return 0
+ end
+
+ mr_fingerprints = mr_queries.filter_map { |q| q['fingerprint'] }
+ if mr_fingerprints.empty?
+ logger.info "No fingerprints found in MR queries... exiting"
+ return 0
+ end
+
+ logger.info "Found #{mr_fingerprints.size} total queries in MR"
+
+ # Step 2: Get master fingerprints
+ master_fingerprints = get_master_fingerprints
+ if master_fingerprints.empty?
+ logger.info "No master fingerprints found for comparison... exiting"
+ return 0
+ end
+
+ # Step 3: Compare and filter
+ mr_queries = filter_new_queries(mr_queries, master_fingerprints)
+
+ # Step 4: Report generation
+ logger.info "Final result: #{mr_queries.size} new queries compared to all master packages"
+ report = report_generator.generate(mr_queries)
+ write_report(output_file, report)
+ mr_queries.size
+ rescue StandardError => e
+ logger.info "Error in main execution: #{e.message}"
+    write_report(output_file, "# SQL Query Analysis\n\n⚠️ Analysis failed: #{e.message}")
+ 0
+ end
+
+ def filter_new_queries(mr_queries, master_fingerprints)
+ original_count = mr_queries.size
+ logger.info "Filtering #{original_count} queries against master fingerprints..."
+
+ # Only keep queries with fingerprints not in master set
+ new_queries = mr_queries.select { |q| q['fingerprint'] && Set[q['fingerprint']].disjoint?(master_fingerprints) }
+
+ filtered_count = original_count - new_queries.size
+ logger.info "Filtered out #{filtered_count} existing queries, #{new_queries.size} new queries found"
+
+ if new_queries.empty?
+ logger.info "All queries in MR are already present in master packages"
+ write_report(output_file, %(# SQL Query Analysis
+
+ No new SQL queries detected in this MR.
+ All queries in this MR are already present in master
+ ))
+ end
+
+ new_queries
+ end
+
+ def get_master_fingerprints
+ logger.info "Fetching master fingerprints from consolidated package..."
+ fingerprints = Set.new
+
+ begin
+ content = download_consolidated_package
+ if content.nil?
+ logger.error "Failed to download consolidated package"
+ return fingerprints
+ end
+
+ # Extract fingerprints from the package
+ fingerprints = sql_fingerprint_extractor.extract_from_tar_gz(content)
+ logger.info "Loaded #{fingerprints.size} master fingerprints from consolidated package"
+ rescue StandardError => e
+ logger.error "Error loading master fingerprints: #{e.message}"
+ end
+
+ fingerprints
+ end
+
+ def download_consolidated_package(max_size_mb = 100)
+ logger.info "Downloading from: #{CONSOLIDATED_FINGERPRINTS_URL}"
+ url = URI(CONSOLIDATED_FINGERPRINTS_URL)
+
+ # Check file size first
+ begin
+ response = make_request(url, method: :head, parse_json: false)
+
+ if response.is_a?(Net::HTTPResponse)
+ content_length_mb = response['content-length'].to_i / (1024**2)
+ if content_length_mb > max_size_mb
+ logger.error "Package size (#{content_length_mb}MB) exceeds maximum allowed size (#{max_size_mb}MB)"
+ return
+ end
+ end
+ rescue StandardError => e
+ logger.warn "Warning: Could not validate file size: #{e}"
+ end
+
+ make_request(url, method: :get, parse_json: false)
+ end
+
+ def write_report(output_file, content)
+ File.write(output_file, content)
+ logger.info "Report saved to #{output_file}"
+ rescue StandardError => e
+ logger.error "Could not write report to file: #{e.message}"
+ end
+
+ def make_request(url, method: :get, parse_json: true, attempt: 1, max_attempts: 10)
+ if attempt >= max_attempts
+ logger.info "Maximum retry attempts (#{max_attempts}) reached for rate limiting"
+ return parse_json ? [] : nil
+ end
+
+ begin
+ http = Net::HTTP.new(url.host, url.port)
+ http.use_ssl = (url.scheme == 'https')
+ http.read_timeout = 120
+
+ request = build_request(method, url)
+ if ENV['GITLAB_TOKEN']
+ request['PRIVATE-TOKEN'] = ENV['GITLAB_TOKEN']
+ elsif ENV['CI_JOB_TOKEN']
+ request['JOB-TOKEN'] = ENV['CI_JOB_TOKEN']
+ end
+
+ response = http.request(request)
+
+ case response
+ when Net::HTTPSuccess
+ return response if method == :head
+
+ if parse_json
+ begin
+ JSON.parse(response.body)
+ rescue JSON::ParserError => e
+ logger.error "Failed to parse JSON: #{e.message}"
+ []
+ end
+ else
+ response.body
+ end
+
+ when Net::HTTPTooManyRequests,
+ Net::HTTPServerError,
+ Net::HTTPInternalServerError,
+ Net::HTTPServiceUnavailable,
+ Net::HTTPGatewayTimeout,
+ Net::HTTPBadGateway
+ backoff = [1 * (2**attempt), 60].min
+ logger.info "HTTP #{response.code} - Waiting and retrying after #{backoff} secs"
+ sleep(backoff)
+ make_request(
+ url, method: method, parse_json: parse_json, attempt: attempt + 1, max_attempts: max_attempts
+ )
+ else
+ logger.error "HTTP request failed: #{response.code} - #{response.message}"
+ parse_json ? [] : nil
+ end
+
+ rescue StandardError => e
+ logger.error "Error making request: #{e}"
+ parse_json ? [] : nil
+ end
+ end
+
+ private
+
+ def build_request(method, url)
+ case method
+ when :get
+ Net::HTTP::Get.new(url)
+ when :head
+ Net::HTTP::Head.new(url)
+ else
+ raise ArgumentError, "Unsupported HTTP method: #{method}"
+ end
+ end
+
+ # ReportGenerator handles creation of readable reports from query data
+ class ReportGenerator
+ attr_reader :logger
+
+ def initialize(logger)
+ @logger = logger || Logger.new($stdout)
+ end
+
+ def generate(mr_queries)
+ report = "# SQL Query Analysis\n\n"
+
+ if mr_queries.empty?
+ report += "No new SQL queries detected in this MR."
+ return report
+ end
+
+ report += "## Identified potential #{mr_queries.size} new SQL queries:\n\n"
+
+ mr_queries.each_with_index do |query, idx|
+ next unless query['normalized']
+
+ report += <<~DETAILS
+
+ Query #{idx + 1}: #{format_query_summary(query)}
+
+ ```sql
+ #{query['normalized']}
+ ```
+
+ **Fingerprint:** `#{query['fingerprint']}`
+
+ #{query['plan'] ? format_plan(query['plan']) : ''}
+
+ DETAILS
+ end
+
+ report
+ end
+
+ def format_query_summary(query)
+ text = query['normalized'] || ""
+
+ cleaned = text.gsub(/\s+/, ' ').strip
+ cleaned.size > 80 ? "#{cleaned[0..77]}..." : cleaned
+ end
+
+ def format_plan(plan)
+ return "" unless plan
+
+ <<~PLAN
+ **Execution Plan:**
+ ```json
+ #{
+ if plan.is_a?(Hash)
+ JSON.pretty_generate(plan)
+ else
+ plan.respond_to?(:to_s) ? plan.to_s : plan.inspect
+ end
+ }
+ ```
+
+ PLAN
+ end
+ end
+ private_constant :ReportGenerator
+end
+
+if $PROGRAM_NAME == __FILE__
+ if ARGV.empty?
+    puts "Usage: #{$PROGRAM_NAME} <mr_auto_explain_path>"
+ exit 1
+ end
+
+ mr_auto_explain_path = ARGV[0]
+ unless File.exist?(mr_auto_explain_path)
+ puts "Error: File not found - #{mr_auto_explain_path}"
+ exit 1
+ end
+
+ diff = MergeRequestQueryDiffer.new(mr_auto_explain_path)
+ diff.run
+end
diff --git a/scripts/sql_fingerprint_extractor.rb b/scripts/sql_fingerprint_extractor.rb
new file mode 100755
index 00000000000..3c5697925da
--- /dev/null
+++ b/scripts/sql_fingerprint_extractor.rb
@@ -0,0 +1,133 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'json'
+require 'zlib'
+require 'logger'
+require 'stringio'
+require 'rubygems/package'
+
+# SQLFingerprintExtractor extracts and processes SQL query fingerprints
+# from various file formats including NDJSON, gzipped NDJSON, and tar.gz archives
+class SQLFingerprintExtractor
+ attr_reader :logger
+
+ def initialize(logger = nil)
+ @logger = logger || Logger.new($stdout)
+ end
+
+ # Extract fingerprints from a local file (compressed or uncompressed)
+ # Returns an array of query objects with fingerprints
+ def extract_queries_from_file(file_path)
+ logger.info "Extracting queries from file: #{file_path}" if logger
+ queries = []
+
+ begin
+ if file_path.end_with?('.gz')
+ Zlib::GzipReader.open(file_path) do |gz|
+ gz.each_line do |line|
+ process_json_line(line, queries)
+ end
+ end
+ else
+ File.foreach(file_path) do |line|
+ process_json_line(line, queries)
+ end
+ end
+ rescue StandardError => e
+ logger.warn "Warning: Error reading file: #{e.message}" if logger
+ return [] # Return empty array on error
+ end
+
+ logger.info "Extracted #{queries.size} queries from file: #{file_path}" if logger
+ queries
+ end
+
+ # Extract just the fingerprint strings from a file
+ # Returns a Set of fingerprint strings
+ def extract_fingerprints_from_file(file_path)
+ queries = extract_queries_from_file(file_path)
+ Set.new(queries.filter_map { |q| q['fingerprint'] })
+ end
+
+ # Extract fingerprints from a tar.gz content
+ # Returns a Set of fingerprint strings
+ def extract_from_tar_gz(content, max_size_mb = 250)
+ fingerprints = Set.new
+ max_size = max_size_mb * (1024**2) # guardrail to prevent issues if unexpectedly large
+
+ begin
+ io = StringIO.new(content)
+ gz = Zlib::GzipReader.new(io)
+ tar = Gem::Package::TarReader.new(gz)
+
+ tar&.each do |entry|
+ # Now looking for raw fingerprint files (any text file)
+ next unless entry.file? && !entry.directory?
+
+ # Check file size before reading
+ if entry.header.size > max_size
+ logger.error(
+ "File too large: #{entry.header.size / (1024**2)}MB exceeds limit #{max_size_mb}MB"
+ )
+ return fingerprints
+ end
+
+ entry_content = entry.read
+ entry_content.each_line do |line|
+ fingerprint = line.strip
+ fingerprints.add(fingerprint) unless fingerprint.empty?
+ end
+ end
+ rescue StandardError => e
+ logger.error "Error processing tar.gz: #{e.message}"
+ return Set.new
+ end
+
+ fingerprints
+ end
+
+ # Write a set of fingerprints to file
+ def write_fingerprints_to_file(fingerprints, output_file)
+ File.open(output_file, 'w') do |f|
+ fingerprints.each { |fp| f.puts(fp) }
+ end
+ logger.info "Wrote #{fingerprints.size} fingerprints to #{output_file}" if logger
+ end
+
+ private
+
+ def process_json_line(line, queries)
+ data = JSON.parse(line)
+ queries << data if data['fingerprint']
+ rescue JSON::ParserError
+ # Skip invalid JSON
+ end
+end
+
+# Command-line script functionality
+if __FILE__ == $PROGRAM_NAME
+ if ARGV.size < 2
+    puts "Usage: #{$PROGRAM_NAME} <input_file> <output_file>"
+ exit 1
+ end
+
+ input_file = ARGV[0]
+ output_file = ARGV[1]
+ logger = Logger.new($stdout)
+
+ unless File.exist?(input_file)
+ logger.error "Input file not found - #{input_file}"
+ exit 1
+ end
+
+ begin
+ extractor = SQLFingerprintExtractor.new
+ fingerprints = extractor.extract_fingerprints_from_file(input_file)
+ extractor.write_fingerprints_to_file(fingerprints, output_file)
+ logger.info "Successfully extracted #{fingerprints.size} fingerprints to #{output_file}"
+ rescue StandardError => e
+ logger.error e.message.to_s
+ exit 1
+ end
+end
diff --git a/spec/config/settings_spec.rb b/spec/config/settings_spec.rb
index c76aaf67a8a..a3ff76596a9 100644
--- a/spec/config/settings_spec.rb
+++ b/spec/config/settings_spec.rb
@@ -72,13 +72,6 @@ RSpec.describe Settings, feature_category: :system_access do
end
end
- describe '.attr_encrypted_db_key_base_truncated' do
- it 'returns the first item from #db_key_base_keys_truncated' do
- expect(described_class.attr_encrypted_db_key_base_truncated)
- .to eq(described_class.db_key_base_keys_truncated.first)
- end
- end
-
describe '.db_key_base_keys_truncated' do
it 'is an array of string with maximum 32 bytes size' do
described_class.db_key_base_keys_truncated.each do |key|
@@ -87,13 +80,6 @@ RSpec.describe Settings, feature_category: :system_access do
end
end
- describe '.attr_encrypted_db_key_base_32' do
- it 'returns the first item from #db_key_base_keys_32_bytes' do
- expect(described_class.attr_encrypted_db_key_base_32)
- .to eq(described_class.db_key_base_keys_32_bytes.first)
- end
- end
-
describe '.db_key_base_keys_32_bytes' do
context 'when db key base secret is less than 32 bytes' do
before do
@@ -141,13 +127,6 @@ RSpec.describe Settings, feature_category: :system_access do
end
end
- describe '.attr_encrypted_db_key_base' do
- it 'returns the first item from #attr_encrypted_db_key_base' do
- expect(described_class.attr_encrypted_db_key_base)
- .to eq(described_class.db_key_base_keys.first)
- end
- end
-
describe '.db_key_base_keys' do
before do
allow(Gitlab::Application.credentials)
diff --git a/spec/dot_gitlab_ci/rules_spec.rb b/spec/dot_gitlab_ci/rules_spec.rb
index 99c38105985..9e11c303986 100644
--- a/spec/dot_gitlab_ci/rules_spec.rb
+++ b/spec/dot_gitlab_ci/rules_spec.rb
@@ -67,6 +67,14 @@ RSpec.describe '.gitlab/ci/rules.gitlab-ci.yml', feature_category: :tooling do
next
end
+ # exception: `.if-default-branch-schedule-weekly` should both be set to "never"
+ # because the weekly job is a small subset of tests. We don't want to run either jobs.
+ if base['if'] == config['.if-default-branch-schedule-weekly']['if']
+ expect(derived).to eq(base)
+ expect(derived['when']).to eq('never')
+ next
+ end
+
# exception: `.if-merge-request-not-approved` in the base should be `.if-merge-request-approved` in derived.
# The base wants to run when the MR is approved, and the derived wants to run if it's not approved,
# and both are specifying this with `when: never`.
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 5e51be9b27c..6e277a2567b 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -694,15 +694,22 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
context 'review state filtering' do
let(:params) { { review_state: 'requested_changes' } }
- let(:expected_mr) { [merge_request1] }
+ let(:expected_mr) { [merge_request1, merge_request3] }
subject { described_class.new(user, params).execute }
before do
merge_request1.merge_request_reviewers.update_all(state: :requested_changes)
+ merge_request3.merge_request_reviewers.update_all(state: :requested_changes)
end
it { is_expected.to contain_exactly(*expected_mr) }
+
+ context 'when ignoring a reviewer' do
+ let(:params) { { review_state: 'requested_changes', ignored_reviewer_username: user2.username } }
+
+ it { is_expected.to contain_exactly(merge_request3) }
+ end
end
context 'multiple review state filtering' do
@@ -727,6 +734,12 @@ RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
end
it { is_expected.to contain_exactly(*expected_mr) }
+
+ context 'when ignoring a reviewer' do
+ let(:params) { { not: { review_states: %w[requested_changes reviewed] }, ignored_reviewer_username: user2.username } }
+
+ it { is_expected.to contain_exactly(*expected_mr) }
+ end
end
end
diff --git a/spec/frontend/ci/common/pipeline_inputs/pipeline_inputs_form_spec.js b/spec/frontend/ci/common/pipeline_inputs/pipeline_inputs_form_spec.js
index 4c4875e497b..fba05c72695 100644
--- a/spec/frontend/ci/common/pipeline_inputs/pipeline_inputs_form_spec.js
+++ b/spec/frontend/ci/common/pipeline_inputs/pipeline_inputs_form_spec.js
@@ -139,15 +139,21 @@ describe('PipelineInputsForm', () => {
});
describe('with empty ref (error case)', () => {
- beforeEach(() => {
- pipelineInputsHandler = jest.fn().mockResolvedValue(mockPipelineInputsErrorResponse);
- });
-
it('handles GraphQL error', async () => {
+ pipelineInputsHandler = jest.fn().mockResolvedValue(mockPipelineInputsErrorResponse);
await createComponent();
expect(createAlert).toHaveBeenCalledWith({
- message: 'There was a problem fetching the pipeline inputs.',
+ message: 'ref can only be an existing branch or tag',
+ });
+ });
+
+ it('handles generic error', async () => {
+ pipelineInputsHandler = jest.fn().mockRejectedValue('Error');
+ await createComponent();
+
+ expect(createAlert).toHaveBeenCalledWith({
+ message: 'There was a problem fetching the pipeline inputs. Please try again.',
});
});
});
diff --git a/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js b/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
index 89e2998a44d..16d2c68b0c9 100644
--- a/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
+++ b/spec/frontend/ml/model_registry/apps/show_ml_model_spec.js
@@ -359,8 +359,8 @@ describe('ml/model_registry/apps/show_ml_model', () => {
const findAvatar = () => wrapper.findComponent(GlAvatar);
const findLatestVersionLink = () => wrapper.findByTestId('sidebar-latest-version-link');
const findVersionCount = () => wrapper.findByTestId('sidebar-version-count');
- const findExperimentTitle = () => wrapper.findByTestId('sidebar-experiment-title');
- const findExperiment = () => wrapper.findByTestId('sidebar-experiment-label');
+ const findExperiment = () => wrapper.findByTestId('sidebar-experiment');
+ const findExperimentLabel = () => wrapper.findByTestId('sidebar-experiment-label');
it('displays sidebar author link', () => {
expect(findSidebarAuthorLink().attributes('href')).toBe('path/to/user');
@@ -391,16 +391,16 @@ describe('ml/model_registry/apps/show_ml_model', () => {
});
describe('displays experiment information', () => {
- it('displays experiment title', () => {
- expect(findExperimentTitle().text()).toBe('Experiment');
+ it('displays experiment', () => {
+ expect(findExperiment().exists()).toBe(true);
});
it('displays experiment label', () => {
- expect(findExperiment().text()).toBe('Default experiment');
+ expect(findExperimentLabel().text()).toBe('Default experiment');
});
it('shows a link to the default experiment', () => {
- expect(findExperiment().findComponent(GlLink).attributes('href')).toBe(
+ expect(findExperimentLabel().findComponent(GlLink).attributes('href')).toBe(
'path/to/experiment',
);
});
@@ -423,7 +423,6 @@ describe('ml/model_registry/apps/show_ml_model', () => {
});
it('does not display sidebar experiment information', () => {
- expect(findExperimentTitle().exists()).toBe(false);
expect(findExperiment().exists()).toBe(false);
});
});
diff --git a/spec/frontend/projects/new_v2/components/shared_project_creation_fields_spec.js b/spec/frontend/projects/new_v2/components/shared_project_creation_fields_spec.js
index 03b7626f06d..3ea8a1b4bd7 100644
--- a/spec/frontend/projects/new_v2/components/shared_project_creation_fields_spec.js
+++ b/spec/frontend/projects/new_v2/components/shared_project_creation_fields_spec.js
@@ -1,8 +1,9 @@
import { nextTick } from 'vue';
-import { GlFormInput } from '@gitlab/ui';
+import { GlFormInput, GlFormSelect } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SharedProjectCreationFields from '~/projects/new_v2/components/shared_project_creation_fields.vue';
import NewProjectDestinationSelect from '~/projects/new_v2/components/project_destination_select.vue';
+import { DEPLOYMENT_TARGET_SELECTIONS } from '~/projects/new_v2/form_constants';
describe('Project creation form fields component', () => {
let wrapper;
@@ -23,6 +24,7 @@ describe('Project creation form fields component', () => {
},
stubs: {
GlFormInput,
+ GlFormSelect,
},
});
};
@@ -34,6 +36,19 @@ describe('Project creation form fields component', () => {
const findProjectNameInput = () => wrapper.findByTestId('project-name-input');
const findProjectSlugInput = () => wrapper.findByTestId('project-slug-input');
const findNamespaceSelect = () => wrapper.findComponent(NewProjectDestinationSelect);
+ const findDeploymentTargetSelect = () => wrapper.findByTestId('deployment-target-select');
+ const findKubernetesHelpLink = () => wrapper.findByTestId('kubernetes-help-link');
+
+ describe('target select', () => {
+ it('renders the optional deployment target select', () => {
+ expect(findDeploymentTargetSelect().exists()).toBe(true);
+ expect(findKubernetesHelpLink().exists()).toBe(false);
+ });
+
+ it('has all the options', () => {
+ expect(findDeploymentTargetSelect().props('options')).toEqual(DEPLOYMENT_TARGET_SELECTIONS);
+ });
+ });
it('updates project slug according to a project name', async () => {
// NOTE: vue3 test needs the .setValue(value) and the vm.$emit('input'),
diff --git a/spec/helpers/git_helper_spec.rb b/spec/helpers/git_helper_spec.rb
index 57799462431..68ba460e038 100644
--- a/spec/helpers/git_helper_spec.rb
+++ b/spec/helpers/git_helper_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitHelper do
+RSpec.describe GitHelper, feature_category: :source_code_management do
describe '#short_sha' do
let(:short_sha) { helper.short_sha('d4e043f6c20749a3ab3f4b8e23f2a8979f4b9100') }
@@ -27,6 +27,12 @@ RSpec.describe GitHelper do
it { expect(strip_signature).to eq("this is Roger's signed tag\n\n") }
end
+
+ context 'strips SSH MESSAGE' do
+ let(:strip_signature) { helper.strip_signature(ssh_message_tag) }
+
+ it { expect(strip_signature).to eq("Version 1.70.0\n\n") }
+ end
end
def pgp_signature_tag
@@ -63,6 +69,23 @@ RSpec.describe GitHelper do
SIGNATURE
end
+ def ssh_message_tag
+ <<~SIGNATURE
+ Version 1.70.0
+ -----BEGIN SSH SIGNATURE-----
+
+ iQEzBAABCAAdFiEEFMo1pwRq9j04Jovq68Q/GjfvLIoFAl2l64QACgkQ68Q/Gjfv
+ LIqRDggAm0d1ceVRsfldlwC6guR2ly8aWoTtZZ19E12bsfXd4lJqcQv7JXTP0og0
+ cwbV0l92iBJKGW6bFBipKDFmSgr5le5zFsXYOr9bJCQNOhFNMmtAgaHEIeVI16+c
+ S3pA+qIe516d4wRs/hcbxDJKC68iIlDaLXObdzTTLGMgbCYBFTjYJldNUfTkdvbB
+ oGEpFXuxV9EyfBtPLsz2vUea5GdZcRSVyJbcgm9ZU+ekdLZckroP5M0I5SQTbD3y
+ VrbCY3ziYtau4zK4cV66ybRz1G7tR6dcoC4kGUbaZlKsVZ1Af80agx2d9k5MR1wS
+ 4OFe1H0zIfpPRFsyX2toaum3EX6QBA==
+ =hefg
+ -----END SSH SIGNATURE-----
+ SIGNATURE
+ end
+
def x509_message_tag
<<~SIGNATURE
this is Roger's signed tag
diff --git a/spec/lib/ci/pipeline_creation/inputs/spec_inputs_spec.rb b/spec/lib/ci/pipeline_creation/inputs/spec_inputs_spec.rb
index 18e0fd3a25f..970f66f4910 100644
--- a/spec/lib/ci/pipeline_creation/inputs/spec_inputs_spec.rb
+++ b/spec/lib/ci/pipeline_creation/inputs/spec_inputs_spec.rb
@@ -56,6 +56,32 @@ RSpec.describe Ci::PipelineCreation::Inputs::SpecInputs, feature_category: :pipe
expect(spec_inputs.errors).to be_empty
end
end
+
+ context 'with spec is not a hash' do
+ let(:specs) { 'this is a string' }
+
+ before do
+ stub_const('TranslationStub', Module.new do
+ def s_(message, *_args)
+ message
+ end
+ end)
+
+ described_class.include(TranslationStub)
+ end
+
+ it 'adds error message about invalid input specification' do
+ spec_inputs = described_class.new(specs)
+ expect(spec_inputs.errors).to include(
+ a_string_matching(/Invalid input specification: expected a hash-like object/)
+ )
+ end
+
+ it 'returns empty inputs' do
+ spec_inputs = described_class.new(specs)
+ expect(spec_inputs.all_inputs).to be_empty
+ end
+ end
end
describe '#all_inputs' do
diff --git a/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb b/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb
index b01040adadd..f49aeec3d82 100644
--- a/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb
+++ b/spec/lib/gitlab/background_migration/encrypt_ci_trigger_token_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::BackgroundMigration::EncryptCiTriggerToken, feature_categ
ci_trigger.send :attr_encrypted, :encrypted_token_tmp,
attribute: :encrypted_token,
mode: :per_attribute_iv,
- key: Settings.attr_encrypted_db_key_base_32,
+ key: Settings.db_key_base_keys_32_bytes.first,
algorithm: 'aes-256-gcm',
encode: false
end
diff --git a/spec/lib/gitlab/database/truncate_taggings_spec.rb b/spec/lib/gitlab/database/truncate_taggings_spec.rb
new file mode 100644
index 00000000000..e777ea16f93
--- /dev/null
+++ b/spec/lib/gitlab/database/truncate_taggings_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::TruncateTaggings, feature_category: :database do
+ include MigrationsHelpers
+
+ before do
+ stub_feature_flags(disallow_database_ddl_feature_flags: false)
+ end
+
+ let(:taggings) { table(:taggings, database: :ci) }
+
+ describe '#execute' do
+ context 'when the table has data' do
+ before do
+ taggings.create!
+ end
+
+ context 'when executed on .com' do
+ before do
+ allow(Gitlab).to receive(:com_except_jh?).and_return(true)
+ end
+
+ it 'truncates taggings' do
+ recorder = ActiveRecord::QueryRecorder.new { described_class.new.execute }
+
+ expect(recorder.log).to include(/TRUNCATE TABLE "taggings"/)
+ end
+ end
+
+ it 'is a no-op everywhere else' do
+ recorder = ActiveRecord::QueryRecorder.new { described_class.new.execute }
+
+ expect(recorder.log).to be_empty
+ end
+ end
+
+ context 'when the table is empty' do
+ context 'when executed on .com' do
+ before do
+ allow(Gitlab).to receive(:com_except_jh?).and_return(true)
+ end
+
+ it 'does not truncate taggings' do
+ recorder = ActiveRecord::QueryRecorder.new { described_class.new.execute }
+
+ expect(recorder.log).not_to include(/TRUNCATE TABLE "taggings"/)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 4167daf2efd..46a3b3a9bc8 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -252,8 +252,9 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
describe '.review_states' do
let(:states) { MergeRequestReviewer.states[:requested_changes] }
+ let(:ignored_reviewer) { nil }
- subject(:merge_requests) { described_class.review_states(states) }
+ subject(:merge_requests) { described_class.review_states(states, ignored_reviewer) }
it 'returns MRs that have a reviewer with the passed state' do
expect(merge_requests).to eq([merge_request1])
@@ -264,14 +265,32 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
it { expect(merge_requests).to match_array([merge_request1, merge_request2]) }
end
+
+ context 'when ignoring a reviewer' do
+ let(:states) { [MergeRequestReviewer.states[:reviewed], MergeRequestReviewer.states[:requested_changes]] }
+ let(:ignored_reviewer) { user1 }
+
+ it { expect(merge_requests).to contain_exactly(merge_request2) }
+ end
end
describe '.no_review_states' do
let(:states) { [MergeRequestReviewer.states[:requested_changes]] }
+ let(:ignored_reviewer) { nil }
- subject(:merge_requests) { described_class.no_review_states(states) }
+ subject(:merge_requests) { described_class.no_review_states(states, ignored_reviewer) }
it { expect(merge_requests).to contain_exactly(merge_request2) }
+
+ context 'when ignoring a reviewer' do
+ let(:ignored_reviewer) { user2 }
+
+ before_all do
+ merge_request2.merge_request_reviewers.find_by(user_id: user2.id).update!(state: :requested_changes)
+ end
+
+ it { expect(merge_requests).to contain_exactly(merge_request2) }
+ end
end
describe '.assignee_or_reviewer' do
diff --git a/spec/requests/api/graphql/project/ci/pipeline_creation/input_spec.rb b/spec/requests/api/graphql/project/ci/pipeline_creation/input_spec.rb
index 6dcbf738635..827a0ac04d3 100644
--- a/spec/requests/api/graphql/project/ci/pipeline_creation/input_spec.rb
+++ b/spec/requests/api/graphql/project/ci/pipeline_creation/input_spec.rb
@@ -222,7 +222,7 @@ RSpec.describe 'Query.project.ciPipelineCreationInputs', feature_category: :pipe
post_graphql(query, current_user: user)
expect(graphql_errors)
- .to include(a_hash_including('message' => 'ref can only be an existing branch or tag'))
+ .to include(a_hash_including('message' => 'Can only run new pipelines for an existing branch or tag'))
end
end
end
diff --git a/spec/scripts/merge_request_query_differ_spec.rb b/spec/scripts/merge_request_query_differ_spec.rb
new file mode 100644
index 00000000000..718202e97af
--- /dev/null
+++ b/spec/scripts/merge_request_query_differ_spec.rb
@@ -0,0 +1,398 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require_relative '../../scripts/merge_request_query_differ'
+
+RSpec.describe MergeRequestQueryDiffer, feature_category: :tooling do
+ let(:logger) { instance_double(Logger) }
+ let(:sql_fingerprint_extractor) { instance_double(SQLFingerprintExtractor) }
+
+ let(:file_content) do
+ %(
+ {"fingerprint":"def456","normalized":"SELECT * FROM projects WHERE user_id = $1"}
+ {"normalized":"SELECT * FROM issues"}
+ invalid json line,
+ {"fingerprint":"abc123","normalized":"SELECT * FROM users WHERE id = $1"}
+ )
+ end
+
+ let(:empty_file) { Tempfile.new(%w[mr_auto_explain.ndjson]) }
+ let(:temp_file) { Tempfile.new(%w[mr_auto_explain.ndjson]) }
+
+ before do
+ allow(Logger).to receive(:new).and_return(logger)
+ allow(SQLFingerprintExtractor).to receive(:new).and_return(sql_fingerprint_extractor)
+ allow(logger).to receive_messages(info: nil, warn: nil, error: nil)
+ allow(differ).to receive(:write_report)
+ end
+
+ subject(:differ) { described_class.new(empty_file.path, logger) }
+
+ describe "#run" do
+ context "when no queries are found in MR" do
+ it "exits early and writes an appropriate report" do
+ allow(sql_fingerprint_extractor).to receive(:extract_queries_from_file).and_return([])
+
+ result = differ.run
+
+ expect(result).to eq(0)
+ expect(differ).to have_received(:write_report).with(
+ differ.output_file,
+ "# SQL Query Analysis\n\nNo queries found in this MR."
+ )
+ end
+ end
+
+ context "when queries without fingerprints are found in MR" do
+ it "exits early without further processing" do
+ allow(differ).to receive(:get_master_fingerprints)
+ allow(sql_fingerprint_extractor).to receive(:extract_queries_from_file)
+ .and_return([{ 'normalized' => 'SELECT * FROM issues' }])
+
+ result = differ.run
+
+ expect(differ).not_to have_received(:get_master_fingerprints)
+ expect(result).to eq(0)
+ end
+ end
+
+ context "when no master fingerprints are found" do
+ it "exits early without comparing queries" do
+ allow(sql_fingerprint_extractor).to receive(:extract_queries_from_file)
+ .and_return([{ 'fingerprint' => 'fp1', 'normalized' => 'SELECT * FROM users' }])
+ allow(differ).to receive(:get_master_fingerprints).and_return(Set.new)
+ allow(differ).to receive(:filter_new_queries)
+
+ result = differ.run
+
+ expect(differ).not_to have_received(:filter_new_queries)
+ expect(result).to eq(0)
+ end
+ end
+
+ context "when everything works as expected" do
+ it "processes the entire pipeline from extraction to report generation" do
+ allow(sql_fingerprint_extractor).to receive(:extract_queries_from_file)
+ .and_return([
+ { 'fingerprint' => 'fp3', 'normalized' => 'SELECT * FROM issues' },
+ { 'fingerprint' => 'fp1', 'normalized' => 'SELECT * FROM users' },
+ { 'fingerprint' => 'fp2', 'normalized' => 'SELECT * FROM projects' }
+ ])
+ allow(differ).to receive(:get_master_fingerprints).and_return(Set.new(['fp1']))
+ allow(differ.report_generator).to receive(:generate).and_return("# Sample Test Report")
+
+ result = differ.run
+
+ expect(differ).to have_received(:write_report).with(differ.output_file, "# Sample Test Report")
+ expect(result).to eq(2) # Two new queries (fp2 and fp3)
+ end
+ end
+
+ context "when errors occur" do
+ it "handles errors gracefully" do
+ allow(sql_fingerprint_extractor).to receive(:extract_queries_from_file)
+ .and_raise(StandardError.new("Test error"))
+
+ result = differ.run
+
+ expect(differ).to have_received(:write_report).with(
+ differ.output_file,
+ "# SQL Query Analysis\n\n️ Analysis failed: Test error"
+ )
+ expect(result).to eq(0)
+ end
+ end
+ end
+
+ describe "#get_master_fingerprints" do
+ it "downloads and extracts fingerprints from the consolidated package" do
+ package_content = "mock_package_content"
+ master_fingerprints = Set.new(%w[fd96528f933e7661 b10ab3c1b7bf923e b65a3b193bb3d1fb])
+
+ allow(differ).to receive(:download_consolidated_package).and_return(package_content)
+ allow(sql_fingerprint_extractor).to receive(:extract_from_tar_gz)
+ .with(package_content)
+ .and_return(master_fingerprints)
+
+ result = differ.get_master_fingerprints
+
+ expect(result).to be_a(Set)
+ expect(result.size).to eq(3)
+ expect(result).to eq(master_fingerprints)
+ end
+
+ it "handles download failures" do
+ allow(differ).to receive(:download_consolidated_package).and_return(nil)
+
+ result = differ.get_master_fingerprints
+
+ expect(result).to be_a(Set)
+ expect(result).to be_empty
+ expect(logger).to have_received(:error).with("Failed to download consolidated package")
+ end
+
+ it "handles extraction errors" do
+ allow(differ).to receive(:download_consolidated_package).and_return("package_content")
+ allow(sql_fingerprint_extractor).to receive(:extract_from_tar_gz)
+ .and_raise(StandardError.new("Extraction error"))
+
+ result = differ.get_master_fingerprints
+
+ expect(result).to be_a(Set)
+ expect(result).to be_empty
+ expect(logger).to have_received(:error).with("Error loading master fingerprints: Extraction error")
+ end
+ end
+
+ describe "#filter_new_queries" do
+ let(:mr_queries) do
+ [
+ { 'fingerprint' => 'fp3', 'normalized' => 'SELECT * FROM issues' },
+ { 'fingerprint' => 'fp1', 'normalized' => 'SELECT * FROM users' },
+ { 'fingerprint' => 'fp2', 'normalized' => 'SELECT * FROM projects' }
+ ]
+ end
+
+ it "identifies queries with fingerprints not present in master" do
+ master_fingerprints = Set.new(['fp2'])
+ result = differ.filter_new_queries(mr_queries, master_fingerprints)
+ expect(result.pluck('fingerprint')).to contain_exactly('fp1', 'fp3')
+ end
+
+ it "filters out all queries when all fingerprints are in master" do
+ master_fingerprints = Set.new(%w[fp2 fp1 fp3])
+ result = differ.filter_new_queries(mr_queries, master_fingerprints)
+ expect(result).to be_empty
+ end
+
+ it "writes a report when no new queries are found" do
+ master_fingerprints = Set.new(%w[fp2 fp1 fp3])
+ differ.filter_new_queries(mr_queries, master_fingerprints)
+ expect(differ).to have_received(:write_report).with(differ.output_file, /No new SQL queries detected in this MR/)
+ end
+ end
+
+ describe "#download_consolidated_package" do
+ let(:url) { URI(MergeRequestQueryDiffer::CONSOLIDATED_FINGERPRINTS_URL) }
+ let(:max_size_mb) { 10 }
+
+ it "downloads the package when file size is acceptable" do
+ head_response = instance_double(Net::HTTPSuccess, is_a?: true, :[] => "5242880") # 5MB
+ package_content = "mock package content"
+
+ allow(differ).to receive(:make_request).with(url, method: :head, parse_json: false).and_return(head_response)
+ allow(differ).to receive(:make_request).with(url, method: :get, parse_json: false).and_return(package_content)
+
+ result = differ.download_consolidated_package(max_size_mb)
+ expect(result).to eq(package_content)
+ end
+
+ it "aborts download when file size is too large" do
+ head_response = instance_double(Net::HTTPSuccess, is_a?: true, :[] => ((max_size_mb + 1) * (1024**2)).to_s) # 11MB
+
+ allow(differ).to receive(:make_request).with(url, method: :head, parse_json: false).and_return(head_response)
+ allow(differ).to receive(:make_request).with(url, method: :get, parse_json: false)
+
+ result = differ.download_consolidated_package(max_size_mb)
+
+ expect(differ).to have_received(:make_request).with(url, method: :head, parse_json: false)
+ expect(differ).not_to have_received(:make_request).with(url, method: :get, parse_json: false)
+ expect(result).to be_nil
+ end
+
+ it "proceeds with download when size check fails" do
+ package_content = "mock package content"
+ allow(differ).to receive(:make_request)
+ .with(url, method: :head, parse_json: false)
+ .and_raise(StandardError.new("Size check failed"))
+ allow(differ).to receive(:make_request)
+ .with(url, method: :get, parse_json: false)
+ .and_return(package_content)
+
+ result = differ.download_consolidated_package(max_size_mb)
+
+ expect(result).to eq(package_content)
+ expect(logger).to have_received(:warn).with(/Warning: Could not validate file size/)
+ expect(differ).to have_received(:make_request).with(url, method: :get, parse_json: false)
+ end
+ end
+
+ describe "#make_request" do
+ let(:test_url) { URI("https://gitlab.example.com/foo/bar") }
+ let(:http) { instance_double(Net::HTTP) }
+ let(:request) { instance_double(Net::HTTP::Get) }
+ let(:success_response) { Net::HTTPSuccess.new('1.1', '200', 'OK') }
+
+ before do
+ allow(Net::HTTP).to receive(:new).with(any_args).and_return(http)
+ allow(http).to receive(:use_ssl=)
+ allow(http).to receive(:read_timeout=)
+ allow(success_response).to receive(:body).and_return('{"data":"success"}')
+ allow(http).to receive(:request).and_return(success_response)
+ end
+
+ context "with authentication headers" do
+ before do
+ allow(Net::HTTP::Get).to receive(:new).and_return(request)
+ allow(request).to receive(:[]=)
+ end
+
+ it "set PRIVATE-TOKEN when GITLAB_TOKEN present" do
+ stub_env('GITLAB_TOKEN', "test-gitlab-token")
+ differ.make_request(test_url)
+ expect(request).to have_received(:[]=).with('PRIVATE-TOKEN', "test-gitlab-token")
+ end
+
+ it "set JOB-TOKEN when CI_JOB_TOKEN present" do
+ stub_env('CI_JOB_TOKEN', "test-ci-job-token")
+ differ.make_request(test_url)
+ expect(request).to have_received(:[]=).with('JOB-TOKEN', "test-ci-job-token")
+ end
+
+ it "prefers GITLAB_TOKEN over CI_JOB_TOKEN" do
+ stub_env('CI_JOB_TOKEN', "test-ci-job-token")
+ stub_env('GITLAB_TOKEN', "test-gitlab-token")
+
+ differ.make_request(test_url)
+ expect(request).to have_received(:[]=).with('PRIVATE-TOKEN', "test-gitlab-token")
+ expect(request).not_to have_received(:[]=).with('JOB-TOKEN', "test-ci-job-token")
+ end
+ end
+
+ it "stops retrying after max attempts" do
+ result = differ.make_request(test_url, attempt: 4, max_attempts: 3)
+ expect(result).to eq([])
+ expect(logger).to have_received(:info).with("Maximum retry attempts (3) reached for rate limiting")
+ end
+
+ it "returns parsed JSON for successful requests" do
+ result = differ.make_request(test_url)
+ expect(result).to eq({ "data" => "success" })
+ end
+
+ it "returns raw response body when parse_json is false" do
+ allow(success_response).to receive(:body).and_return('raw response data')
+ result = differ.make_request(test_url, parse_json: false)
+ expect(result).to eq('raw response data')
+ end
+
+ it "supports HEAD requests" do
+ result = differ.make_request(test_url, method: :head, parse_json: false)
+ expect(result).to eq(success_response)
+ end
+
+ context "when handling errors" do
+ it "retries on common server errors" do
+ allow(http).to receive(:request).and_return(
+ Net::HTTPServiceUnavailable.new('1.1', '503', 'Service Unavailable'),
+ Net::HTTPTooManyRequests.new('1.1', '429', 'Too Many Requests'),
+ success_response
+ )
+ allow(differ).to receive(:sleep)
+
+ result = differ.make_request(test_url)
+ expect(result).to eq({ "data" => "success" })
+ end
+
+ it "returns empty json when parse_json is true" do
+ allow(http).to receive(:request).and_return(Net::HTTPFatalError)
+ expect(differ.make_request(test_url, method: :get, parse_json: true)).to eq([])
+ end
+
+ it "returns nil when parse json is false" do
+ allow(http).to receive(:request).and_return(Net::HTTPFatalError)
+ expect(differ.make_request(test_url, method: :get, parse_json: false)).to be_nil
+ end
+
+ it "logs error when resource not found" do
+ allow(http).to receive(:request).and_return(Net::HTTPNotFound.new('1.1', '404', 'Test 404'))
+ expect(differ.make_request(test_url, method: :get, parse_json: true)).to eq([])
+ expect(logger).to have_received(:error).with(/HTTP request failed: 404 - Test 404/)
+ end
+
+ it "logs error if an unsupported method is passed" do
+ result = differ.make_request(test_url, method: :put, parse_json: false)
+
+ expect(result).to be_nil
+ expect(logger).to have_received(:error).with(/Error making request: Unsupported HTTP method: put/)
+ end
+
+ it "returns nil and logs error when exception occurs" do
+ allow(http).to receive(:request).and_raise(StandardError.new("Testing Error"))
+
+ result = differ.make_request(test_url, parse_json: false)
+
+ expect(result).to be_nil
+ expect(logger).to have_received(:error).with("Error making request: Testing Error")
+ end
+
+ it "returns empty array and logs error when JSON parsing fails" do
+ allow(success_response).to receive(:body).and_return('invalid json')
+ result = differ.make_request(test_url)
+ expect(result).to eq([])
+ expect(logger).to have_received(:error).with(/Failed to parse JSON/)
+ end
+ end
+ end
+
+ describe "#write_report" do
+ subject(:differ) { described_class.new(empty_file, logger) }
+
+ before do
+ allow(logger).to receive(:info)
+ allow(differ).to receive(:write_report).and_call_original
+ allow(File).to receive(:write)
+ end
+
+ it "writes content to file and logs success" do
+ differ.write_report("test.txt", "content")
+ expect(logger).to have_received(:info).with("Report saved to test.txt")
+ end
+
+ it "logs errors when file write fails" do
+ allow(File).to receive(:write).and_raise(StandardError.new("Write error"))
+
+ differ.write_report("test.txt", "content")
+ expect(logger).to have_received(:error).with("Could not write report to file: Write error")
+ end
+ end
+
+ describe "ReportGenerator" do
+ let(:report_generator) { differ.report_generator }
+
+ it "generates a report with new queries" do
+ report = report_generator.generate([{ 'fingerprint' => 'fp1', 'normalized' => 'SELECT * FROM users' }])
+
+ expect(report).to include("# SQL Query Analysis")
+ expect(report).to include("Identified potential 1 new SQL queries")
+ expect(report).to include("Query 1")
+ expect(report).to include("SELECT * FROM users")
+ expect(report).to include("fp1")
+ end
+
+ it "includes execution plans when available" do
+ report = report_generator.generate([
+ { 'fingerprint' => 'fp1', 'normalized' => 'SELECT * FROM users', 'plan' => { 'Node Type' => 'Index Scan' } }
+ ])
+ expect(report).to include("Execution Plan")
+ expect(report).to include("Index Scan")
+ end
+
+ it "handles empty query list" do
+ report = report_generator.generate([])
+ expect(report).to include("No new SQL queries detected in this MR")
+ end
+
+ it "formats hash plan" do
+ hash_plan = { 'Node Type' => 'Index Scan' }
+ result = report_generator.format_plan(hash_plan)
+ expect(result).to include(' "Node Type": "Index Scan"')
+ end
+
+ it "formats non hash plan" do
+ result = report_generator.format_plan(1234)
+ expect(result).to include('1234')
+ end
+ end
+end
diff --git a/spec/scripts/sql_fingerprint_extractor_spec.rb b/spec/scripts/sql_fingerprint_extractor_spec.rb
new file mode 100644
index 00000000000..805e5c0cbcb
--- /dev/null
+++ b/spec/scripts/sql_fingerprint_extractor_spec.rb
@@ -0,0 +1,194 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require_relative '../../scripts/sql_fingerprint_extractor'
+
+RSpec.describe SQLFingerprintExtractor, feature_category: :tooling do
+ let(:logger) { instance_double(Logger, info: nil, warn: nil, error: nil) }
+ let(:extractor) { described_class.new(logger) }
+
+ describe '#initialize' do
+ it 'uses the provided logger' do
+ expect(extractor.logger).to eq(logger)
+ end
+
+ it 'creates a default logger if none provided' do
+ allow(Logger).to receive(:new).with($stdout).and_call_original
+ expect(described_class.new.logger).to be_a(Logger)
+ end
+ end
+
+ describe '#extract_queries_from_file' do
+ context 'with a regular text file' do
+ let(:file_path) { 'test_queries.ndjson' }
+ let(:valid_line) { '{"fingerprint":"def123","normalized":"SELECT * FROM accounts"}' }
+ let(:second_valid_line) { '{"fingerprint":"abc123","normalized":"SELECT * FROM users"}' }
+ let(:invalid_line) { 'invalid json' }
+ let(:no_fingerprint) { '{"normalized":"SELECT * FROM users"}' }
+
+ before do
+ allow(File).to receive(:foreach)
+ .with(file_path)
+ .and_yield(valid_line)
+ .and_yield(invalid_line)
+ .and_yield(no_fingerprint)
+ .and_yield(second_valid_line)
+ end
+
+ it 'extracts valid queries with fingerprints' do
+ queries = extractor.extract_queries_from_file(file_path)
+ expect(queries.size).to eq(2)
+ expect(queries.first['fingerprint']).to eq('def123')
+ expect(queries.first['normalized']).to eq('SELECT * FROM accounts')
+ expect(queries.second['fingerprint']).to eq('abc123')
+ expect(queries.second['normalized']).to eq('SELECT * FROM users')
+ end
+
+ it 'logs the extraction process' do
+ expect(logger).to receive(:info).with("Extracting queries from file: #{file_path}")
+ expect(logger).to receive(:info).with(/Extracted \d+ queries from file:/)
+ extractor.extract_queries_from_file(file_path)
+ end
+ end
+
+ context 'with a gzipped file' do
+ let(:gz_file_path) { 'test_queries.ndjson.gz' }
+ let(:gz_reader) { instance_double(Zlib::GzipReader) }
+ let(:valid_line) { '{"fingerprint":"def456","normalized":"SELECT * FROM posts"}' }
+
+ before do
+ allow(Zlib::GzipReader).to receive(:open).with(gz_file_path).and_yield(gz_reader)
+ allow(gz_reader).to receive(:each_line).and_yield(valid_line)
+ end
+
+ it 'extracts queries from a gzipped file' do
+ queries = extractor.extract_queries_from_file(gz_file_path)
+ expect(queries.size).to eq(1)
+ expect(queries.first['fingerprint']).to eq('def456')
+ end
+ end
+
+ context 'when an error occurs' do
+ let(:file_path) { 'nonexistent_file.ndjson' }
+
+ before do
+ allow(File).to receive(:foreach).with(file_path).and_raise(StandardError.new('File read error'))
+ end
+
+ it 'logs the error and returns an empty array' do
+ expect(logger).to receive(:warn).with("Warning: Error reading file: File read error")
+ queries = extractor.extract_queries_from_file(file_path)
+ expect(queries).to be_empty
+ end
+ end
+ end
+
+ describe '#extract_fingerprints_from_file' do
+ let(:file_path) { 'test_queries.ndjson' }
+ let(:queries) { [{ 'fingerprint' => 'abc123' }, { 'fingerprint' => 'def456' }, {}] }
+
+ before do
+ allow(extractor).to receive(:extract_queries_from_file).with(file_path).and_return(queries)
+ end
+
+ it 'extracts only the fingerprints as a Set' do
+ fingerprints = extractor.extract_fingerprints_from_file(file_path)
+ expect(fingerprints).to be_a(Set)
+ expect(fingerprints.size).to eq(2)
+ expect(fingerprints).to include('abc123', 'def456')
+ end
+ end
+
+ describe '#extract_from_tar_gz' do
+ let(:tar_gz_content) { 'mock_tar_gz_content' }
+ let(:string_io) { instance_double(StringIO) }
+ let(:gzip_reader) { instance_double(Zlib::GzipReader) }
+ let(:tar_reader) { instance_double(Gem::Package::TarReader) }
+ let(:entry) { instance_double(Gem::Package::TarReader::Entry, file?: true, directory?: false) }
+ let(:entry_header) { instance_double(Gem::Package::TarHeader, size: 1000) }
+ let(:entry_content) { "fingerprint1\nfingerprint2\n" }
+
+ before do
+ allow(StringIO).to receive(:new).with(tar_gz_content).and_return(string_io)
+ allow(Zlib::GzipReader).to receive(:new).with(string_io).and_return(gzip_reader)
+ allow(Gem::Package::TarReader).to receive(:new).with(gzip_reader).and_return(tar_reader)
+ allow(tar_reader).to receive(:each).and_yield(entry)
+ allow(entry).to receive_messages(header: entry_header, read: entry_content)
+ end
+
+ it 'extracts fingerprints from tar.gz content' do
+ fingerprints = extractor.extract_from_tar_gz(tar_gz_content)
+ expect(fingerprints).to be_a(Set)
+ expect(fingerprints.size).to eq(2)
+ expect(fingerprints).to include('fingerprint1', 'fingerprint2')
+ end
+
+ context 'when file is too large' do
+      let(:max_size_mb) { 0.001 } # 0.001 MB ≈ 1 KB size limit
+      let(:entry_header) { instance_double(Gem::Package::TarHeader, size: 2000) } # 2000-byte entry exceeds the ~1 KB limit
+
+ it 'logs the error and returns empty set' do
+ expect(logger).to receive(:error).with(/File too large:/)
+ fingerprints = extractor.extract_from_tar_gz(tar_gz_content, max_size_mb)
+ expect(fingerprints).to be_a(Set)
+ expect(fingerprints).to be_empty
+ end
+ end
+
+ context 'when an error occurs' do
+ before do
+ allow(StringIO).to receive(:new).with(tar_gz_content).and_raise(StandardError.new('Tar.gz processing error'))
+ end
+
+ it 'logs the error and returns empty set' do
+ expect(logger).to receive(:error).with("Error processing tar.gz: Tar.gz processing error")
+ fingerprints = extractor.extract_from_tar_gz(tar_gz_content)
+ expect(fingerprints).to be_a(Set)
+ expect(fingerprints).to be_empty
+ end
+ end
+ end
+
+ describe '#write_fingerprints_to_file' do
+ let(:fingerprints) { Set.new(%w[abc123 def456]) }
+ let(:output_file) { 'output_fingerprints.txt' }
+ let(:file) { instance_double(File) }
+
+ before do
+ allow(File).to receive(:open).with(output_file, 'w').and_yield(file)
+ allow(file).to receive(:puts)
+ end
+
+ it 'writes each fingerprint to the file' do
+ expect(file).to receive(:puts).with('abc123')
+ expect(file).to receive(:puts).with('def456')
+ extractor.write_fingerprints_to_file(fingerprints, output_file)
+ end
+
+ it 'logs the number of fingerprints written' do
+ expect(logger).to receive(:info).with("Wrote 2 fingerprints to output_fingerprints.txt")
+ extractor.write_fingerprints_to_file(fingerprints, output_file)
+ end
+ end
+
+ describe '#process_json_line' do
+ let(:queries) { [] }
+
+ it 'adds valid queries with fingerprints' do
+ extractor.send(:process_json_line, '{"fingerprint":"abc123"}', queries)
+ expect(queries.size).to eq(1)
+ expect(queries.first['fingerprint']).to eq('abc123')
+ end
+
+ it 'skips lines without fingerprints' do
+ extractor.send(:process_json_line, '{"other":"value"}', queries)
+ expect(queries).to be_empty
+ end
+
+ it 'handles JSON parse errors' do
+ extractor.send(:process_json_line, 'invalid json', queries)
+ expect(queries).to be_empty
+ end
+ end
+end
diff --git a/spec/services/ci/pipeline_creation/find_pipeline_inputs_service_spec.rb b/spec/services/ci/pipeline_creation/find_pipeline_inputs_service_spec.rb
index 818b4021753..472b631a213 100644
--- a/spec/services/ci/pipeline_creation/find_pipeline_inputs_service_spec.rb
+++ b/spec/services/ci/pipeline_creation/find_pipeline_inputs_service_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Ci::PipelineCreation::FindPipelineInputsService, feature_category
result = service.execute
expect(result).to be_error
- expect(result.message).to eq('insufficient permissions to read inputs')
+ expect(result.message).to eq(s_('Pipelines|Insufficient permissions to read inputs'))
end
end
@@ -90,7 +90,7 @@ RSpec.describe Ci::PipelineCreation::FindPipelineInputsService, feature_category
result = service.execute
expect(result).to be_error
- expect(result.message).to eq('ref can only be an existing branch or tag')
+ expect(result.message).to eq(s_('Pipelines|Can only run new pipelines for an existing branch or tag'))
end
end
@@ -101,7 +101,7 @@ RSpec.describe Ci::PipelineCreation::FindPipelineInputsService, feature_category
result = service.execute
expect(result).to be_error
- expect(result.message).to eq('ref can only be an existing branch or tag')
+ expect(result.message).to eq(s_('Pipelines|Can only run new pipelines for an existing branch or tag'))
end
end
@@ -143,7 +143,7 @@ RSpec.describe Ci::PipelineCreation::FindPipelineInputsService, feature_category
result = service.execute
expect(result).to be_error
- expect(result.message).to eq('invalid YAML config')
+ expect(result.message).to eq(s_('Pipelines|Invalid YAML syntax'))
end
end
end
@@ -191,7 +191,7 @@ RSpec.describe Ci::PipelineCreation::FindPipelineInputsService, feature_category
result = service.execute
expect(result).to be_error
- expect(result.message).to eq('inputs not supported for this CI config source')
+ expect(result.message).to eq(s_('Pipelines|Inputs not supported for this CI config source'))
end
end
end
diff --git a/spec/services/ci/pipeline_triggers/create_service_spec.rb b/spec/services/ci/pipeline_triggers/create_service_spec.rb
index 8cef6eb2195..8b0147711f6 100644
--- a/spec/services/ci/pipeline_triggers/create_service_spec.rb
+++ b/spec/services/ci/pipeline_triggers/create_service_spec.rb
@@ -78,7 +78,8 @@ RSpec.describe Ci::PipelineTriggers::CreateService, feature_category: :continuou
max_expiry_date = Date.current.advance(days: PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS)
error_text = format(_("must be before %{expiry_date}"), expiry_date: max_expiry_date)
- it 'fails validation when trigger_token_expiration feature flag on' do
+ it 'fails validation when trigger_token_expiration feature flag on',
+ quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/524233' do
stub_feature_flags(trigger_token_expiration: true)
response = service.execute
diff --git a/vite.config.js b/vite.config.js
index 5079627ab91..224b46a524a 100644
--- a/vite.config.js
+++ b/vite.config.js
@@ -23,6 +23,7 @@ import { FixedRubyPlugin } from './config/helpers/vite_plugin_ruby_fixed.mjs';
import { StylePlugin } from './config/helpers/vite_plugin_style.mjs';
import { IconsPlugin } from './config/helpers/vite_plugin_icons.mjs';
import { ImagesPlugin } from './config/helpers/vite_plugin_images.mjs';
+import { CrossOriginWorkerPlugin } from './config/helpers/vite_plugin_cross_origin_worker';
let viteGDKConfig;
try {
@@ -110,6 +111,7 @@ export default defineConfig({
viteCommonjs({
include: [path.resolve(javascriptsPath, 'locale/ensure_single_line.cjs')],
}),
+ CrossOriginWorkerPlugin(),
],
define: {
// window can be undefined in a Web Worker
@@ -130,6 +132,8 @@ export default defineConfig({
'process.env.PDF_JS_CMAPS_UBLIC_PATH': JSON.stringify(PDF_JS_CMAPS_PUBLIC_PATH),
},
server: {
+    // bind to all interfaces so the dev server is reachable from containers, VMs, and remote setups where a localhost-only bind is not routable
+ host: '0.0.0.0',
cors: true,
warmup: {
clientFiles: ['javascripts/entrypoints/main.js', 'javascripts/entrypoints/super_sidebar.js'],