-
-
+
+
diff --git a/app/controllers/import/bulk_imports_controller.rb b/app/controllers/import/bulk_imports_controller.rb
index d6bfcb35788..079324bfe4d 100644
--- a/app/controllers/import/bulk_imports_controller.rb
+++ b/app/controllers/import/bulk_imports_controller.rb
@@ -68,7 +68,7 @@ class Import::BulkImportsController < ApplicationController
end
::BulkImports::CreateService.new(
- current_user, entry, credentials, fallback_organization: Current.organization
+ current_user, entry.to_h, credentials, fallback_organization: Current.organization
).execute
end
diff --git a/app/models/merge_request/diff_commit_user.rb b/app/models/merge_request/diff_commit_user.rb
index 3fc5c9318a4..bc82c1f47e9 100644
--- a/app/models/merge_request/diff_commit_user.rb
+++ b/app/models/merge_request/diff_commit_user.rb
@@ -20,19 +20,35 @@ class MergeRequest::DiffCommitUser < ApplicationRecord
end
# Creates a new row, or returns an existing one if a row already exists.
- def self.find_or_create(name, email)
- find_or_create_by!(name: name, email: email)
+ def self.find_or_create(name, email, organization_id, with_organization: false)
+ return find_or_create_by!(name: name, email: email) unless with_organization
+
+ # Try to find an exact match first
+ result = find_by(name: name, email: email, organization_id: organization_id)
+
+ # If not found, look for one with nil organization_id
+ if !result && organization_id.present?
+ result = find_by(name: name, email: email, organization_id: nil)
+ result.update!(organization_id: organization_id) if result
+ end
+
+ # If still not found, try to create using find_or_create_by!
+ result || find_or_create_by!(name: name, email: email, organization_id: organization_id)
rescue ActiveRecord::RecordNotUnique
retry
end
- # Finds many (name, email) pairs in bulk.
- def self.bulk_find(pairs)
+ # Finds many (name, email) pairs or (name, email, organization_id) triples in bulk.
+ def self.bulk_find(input, with_organization: false)
queries = {}
rows = []
- pairs.each do |(name, email)|
- queries[[name, email]] = where(name: name, email: email).to_sql
+ input.each do |item|
+ name, email, organization_id = item
+ conditions = { name: name, email: email }
+ conditions[:organization_id] = organization_id if with_organization
+
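+ # Hash keys are the condition values, so duplicate input tuples produce a single query.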
+ queries[conditions.values] = where(conditions).to_sql
end
# We may end up having to query many users. To ensure we don't hit any
@@ -44,33 +60,100 @@ class MergeRequest::DiffCommitUser < ApplicationRecord
rows
end
- # Finds or creates rows for the given pairs of names and Emails.
+ # Finds or creates rows for the given pairs of names and emails or
+ # triples of names, emails, and organization IDs.
#
- # The `names_and_emails` argument must be an Array/Set of tuples like so:
+ # The input argument must be an Array/Set of pairs or triples like so:
#
# [
- # [name, email],
- # [name, email],
+ # [name, email], # legacy format when with_organization: false
+ # [name, email, organization_id], # new format when with_organization: true
# ...
# ]
#
- # This method expects that the names and Emails have already been trimmed to
+ # This method expects that the names and emails have already been trimmed to
# at most 512 characters.
#
- # The return value is a Hash that maps these tuples to instances of this
- # model.
- def self.bulk_find_or_create(pairs)
+ # The return value is a Hash that maps these pairs/triples to instances of this model.
+ def self.bulk_find_or_create(input, with_organization: false)
+ return bulk_find_or_create_legacy(input) unless with_organization
+
+ mapping = {}
+ ids_to_update = []
+
+ # Extract organization_id - it's the same for all triples in this batch
+ organization_id = input.first&.last
+
+ # Find all existing records by (name, email) only
+ existing_records = bulk_find(input, with_organization: false)
+
+ existing_records.each do |row|
+ ids_to_update << row.id if row.organization_id.nil?
+ # Map all found records with the organization_id from input
+ mapping[[row.name, row.email, organization_id]] = row
+ end
+
+ # Bulk update organization_ids for records that had nil
+ if ids_to_update.any?
+ where(id: ids_to_update).update_all(organization_id: organization_id)
+
+ # Update the organization_id on the objects we already have
+ existing_records.each do |row|
+ row.organization_id = organization_id if ids_to_update.include?(row.id)
+ end
+ end
+
+ # Create missing records
+ create_missing_records(input, mapping)
+
+ # Handle concurrent inserts
+ handle_concurrent_inserts(input, mapping)
+
+ mapping
+ end
+
+ def self.create_missing_records(input, mapping)
+ create = []
+
+ # Collect records that need to be created
+ input.each do |(name, email, org_id)|
+ next if mapping[[name, email, org_id]]
+
+ create << { name: name, email: email, organization_id: org_id }
+ end
+
+ return if create.empty?
+
+ # Bulk insert new records
+ insert_all(create, returning: %w[id name email organization_id]).each do |row|
+ mapping[[row['name'], row['email'], row['organization_id']]] =
+ new(id: row['id'], name: row['name'], email: row['email'], organization_id: row['organization_id'])
+ end
+ end
+
+ def self.handle_concurrent_inserts(input, mapping)
+ # Find any records that were created concurrently
+ missing_triples = input.reject { |(name, email, org_id)| mapping.key?([name, email, org_id]) }
+
+ return if missing_triples.empty?
+
+ bulk_find(missing_triples, with_organization: true).each do |row|
+ mapping[[row.name, row.email, row.organization_id]] = row
+ end
+ end
+
+ def self.bulk_find_or_create_legacy(input)
mapping = {}
create = []
# Over time, fewer new rows need to be created. We take advantage of that
# here by first finding all rows that already exist, using a limited number
# of queries (in most cases only one query will be needed).
- bulk_find(pairs).each do |row|
+ bulk_find(input).each do |row|
mapping[[row.name, row.email]] = row
end
- pairs.each do |(name, email)|
+ input.each do |(name, email)|
create << { name: name, email: email } unless mapping[[name, email]]
end
@@ -86,10 +169,14 @@ class MergeRequest::DiffCommitUser < ApplicationRecord
# It's possible for (name, email) pairs to be inserted concurrently,
# resulting in the above insert not returning anything. Here we get any
# remaining users that were created concurrently.
- bulk_find(pairs.reject { |pair| mapping.key?(pair) }).each do |row|
+ bulk_find(input.reject { |pair| mapping.key?(pair) }).each do |row|
mapping[[row.name, row.email]] = row
end
mapping
end
+
+ private_class_method :bulk_find_or_create_legacy,
+ :create_missing_records,
+ :handle_concurrent_inserts
end
diff --git a/app/models/merge_request_diff.rb b/app/models/merge_request_diff.rb
index e0d487f110f..96c0633c33a 100644
--- a/app/models/merge_request_diff.rb
+++ b/app/models/merge_request_diff.rb
@@ -898,7 +898,7 @@ class MergeRequestDiff < ApplicationRecord
end
def save_commits
- MergeRequestDiffCommit.create_bulk(self.id, compare.commits.reverse, skip_commit_data: Feature.enabled?(:optimized_commit_storage, project))
+ MergeRequestDiffCommit.create_bulk(self.id, compare.commits.reverse, project, skip_commit_data: Feature.enabled?(:optimized_commit_storage, project))
self.class.uncached { merge_request_diff_commits.reset }
end
diff --git a/app/models/merge_request_diff_commit.rb b/app/models/merge_request_diff_commit.rb
index ba8c756a7ae..3573bba176a 100644
--- a/app/models/merge_request_diff_commit.rb
+++ b/app/models/merge_request_diff_commit.rb
@@ -39,15 +39,25 @@ class MergeRequestDiffCommit < ApplicationRecord
# Deprecated; use `bulk_insert!` from `BulkInsertSafe` mixin instead.
# cf. https://gitlab.com/gitlab-org/gitlab/issues/207989 for progress
- def self.create_bulk(merge_request_diff_id, commits, skip_commit_data: false)
- commit_hashes, user_tuples = prepare_commits_for_bulk_insert(commits)
- users = MergeRequest::DiffCommitUser.bulk_find_or_create(user_tuples)
+ def self.create_bulk(merge_request_diff_id, commits, project, skip_commit_data: false)
+ organization_id = project.organization_id
+ with_organization = Feature.enabled?(:add_organization_to_diff_commit_users, project)
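+ # Commits are always prepared as (name, email, organization_id) triples; the flag decides whether organization_id is used for the lookup.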
+ commit_hashes, user_triples = prepare_commits_for_bulk_insert(commits, organization_id)
+ users = MergeRequest::DiffCommitUser.bulk_find_or_create(
+ user_triples,
+ with_organization: with_organization
+ )
rows = commit_hashes.map.with_index do |commit_hash, index|
sha = commit_hash.delete(:id)
- author = users[[commit_hash[:author_name], commit_hash[:author_email]]]
- committer =
- users[[commit_hash[:committer_name], commit_hash[:committer_email]]]
+
+ if with_organization
+ author = users[[commit_hash[:author_name], commit_hash[:author_email], organization_id]]
+ committer = users[[commit_hash[:committer_name], commit_hash[:committer_email], organization_id]]
+ else
+ author = users[[commit_hash[:author_name], commit_hash[:author_email]]]
+ committer = users[[commit_hash[:committer_name], commit_hash[:committer_email]]]
+ end
# These fields are only used to determine the author/committer IDs, we
# don't store them in the DB.
@@ -80,8 +90,8 @@ class MergeRequestDiffCommit < ApplicationRecord
ApplicationRecord.legacy_bulk_insert(self.table_name, rows) # rubocop:disable Gitlab/BulkInsert
end
- def self.prepare_commits_for_bulk_insert(commits)
- user_tuples = Set.new
+ def self.prepare_commits_for_bulk_insert(commits, organization_id)
+ user_triples = Set.new
hashes = commits.map do |commit|
hash = commit.to_hash.except(:parent_ids, :referenced_by)
@@ -89,13 +99,13 @@ class MergeRequestDiffCommit < ApplicationRecord
hash[key] = MergeRequest::DiffCommitUser.prepare(hash[key])
end
- user_tuples << [hash[:author_name], hash[:author_email]]
- user_tuples << [hash[:committer_name], hash[:committer_email]]
+ user_triples << [hash[:author_name], hash[:author_email], organization_id]
+ user_triples << [hash[:committer_name], hash[:committer_email], organization_id]
hash
end
- [hashes, user_tuples]
+ [hashes, user_triples]
end
def self.oldest_merge_request_id_per_commit(project_id, shas)
diff --git a/app/services/auth/container_registry_authentication_service.rb b/app/services/auth/container_registry_authentication_service.rb
index e5fe46d739b..ea1494b35a7 100644
--- a/app/services/auth/container_registry_authentication_service.rb
+++ b/app/services/auth/container_registry_authentication_service.rb
@@ -137,8 +137,7 @@ module Auth
def self.patterns_metadata(project, user, actions)
{
- tag_deny_access_patterns: tag_deny_access_patterns(project, user, actions),
- tag_immutable_patterns: tag_immutable_patterns(project, actions)
+ tag_deny_access_patterns: tag_deny_access_patterns(project, user, actions)
}
end
@@ -177,14 +176,6 @@ module Auth
patterns
end
- def self.tag_immutable_patterns(project, actions)
- return if project.nil?
- return unless Feature.enabled?(:container_registry_immutable_tags, project)
- return unless (actions & %w[push delete *]).any?
-
- project.container_registry_protection_tag_rules.immutable.pluck_tag_name_patterns.presence
- end
-
def authorized_token(*accesses)
JSONWebToken::RSAToken.new(registry.key).tap do |token|
token.issuer = registry.issuer
diff --git a/config/feature_flags/beta/add_organization_to_diff_commit_users.yml b/config/feature_flags/beta/add_organization_to_diff_commit_users.yml
new file mode 100644
index 00000000000..0d9fc17f27f
--- /dev/null
+++ b/config/feature_flags/beta/add_organization_to_diff_commit_users.yml
@@ -0,0 +1,10 @@
+---
+name: add_organization_to_diff_commit_users
+description:
+feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/517132
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/186458
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/546221
+milestone: '18.1'
+group: group::code review
+type: beta
+default_enabled: false
diff --git a/config/feature_flags/gitlab_com_derisk/show_date_when_relative_time_over_a_year.yml b/config/feature_flags/gitlab_com_derisk/show_date_when_relative_time_over_a_year.yml
new file mode 100644
index 00000000000..5a663bf2564
--- /dev/null
+++ b/config/feature_flags/gitlab_com_derisk/show_date_when_relative_time_over_a_year.yml
@@ -0,0 +1,10 @@
+---
+name: show_date_when_relative_time_over_a_year
+description: Show date when relative time is over a year
+feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/18607
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/192402
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/547106
+milestone: '18.1'
+group: group::project management
+type: gitlab_com_derisk
+default_enabled: false
diff --git a/db/docs/batched_background_migrations/backfill_analyzer_project_statuses.yml b/db/docs/batched_background_migrations/backfill_analyzer_project_statuses.yml
index 72fdc1e5b85..1a017c7d5f8 100644
--- a/db/docs/batched_background_migrations/backfill_analyzer_project_statuses.yml
+++ b/db/docs/batched_background_migrations/backfill_analyzer_project_statuses.yml
@@ -4,5 +4,5 @@ description: Fills analyzer_project_statuses table with initial data
feature_category: security_asset_inventories
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/187214
milestone: '18.1'
-queued_migration_version: 20250513225311
+queued_migration_version: 20250603110954
finalized_by:
diff --git a/db/post_migrate/20250513225311_queue_backfill_analyzer_project_statuses.rb b/db/post_migrate/20250513225311_queue_backfill_analyzer_project_statuses.rb
index 30a5a4379fa..393aec9c58d 100644
--- a/db/post_migrate/20250513225311_queue_backfill_analyzer_project_statuses.rb
+++ b/db/post_migrate/20250513225311_queue_backfill_analyzer_project_statuses.rb
@@ -3,25 +3,15 @@
class QueueBackfillAnalyzerProjectStatuses < Gitlab::Database::Migration[2.3]
milestone '18.1'
- restrict_gitlab_migration gitlab_schema: :gitlab_sec
-
MIGRATION = "BackfillAnalyzerProjectStatuses"
- BATCH_SIZE = 1000
- SUB_BATCH_SIZE = 100
- DELAY_INTERVAL = 2.minutes
def up
- queue_batched_background_migration(
- MIGRATION,
- :vulnerability_statistics,
- :project_id,
- job_interval: DELAY_INTERVAL,
- batch_size: BATCH_SIZE,
- sub_batch_size: SUB_BATCH_SIZE
- )
+ # no-op because there was a bug in the original migration, which has been
+ # fixed by https://gitlab.com/gitlab-org/gitlab/-/merge_requests/193293
end
def down
- delete_batched_background_migration(MIGRATION, :vulnerability_statistics, :project_id, [])
+ # no-op because there was a bug in the original migration, which has been
+ # fixed by https://gitlab.com/gitlab-org/gitlab/-/merge_requests/193293
end
end
diff --git a/db/post_migrate/20250603110954_requeue_backfill_analyzer_project_statuses.rb b/db/post_migrate/20250603110954_requeue_backfill_analyzer_project_statuses.rb
new file mode 100644
index 00000000000..ea3e1eb5a7a
--- /dev/null
+++ b/db/post_migrate/20250603110954_requeue_backfill_analyzer_project_statuses.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+class RequeueBackfillAnalyzerProjectStatuses < Gitlab::Database::Migration[2.3]
+ milestone '18.1'
+
+ restrict_gitlab_migration gitlab_schema: :gitlab_sec
+
+ MIGRATION = "BackfillAnalyzerProjectStatuses"
+ BATCH_SIZE = 1000
+ SUB_BATCH_SIZE = 100
+ DELAY_INTERVAL = 2.minutes
+
+ def up
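+    # Clear the batched migration queued by the buggy original run before requeueing it.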
+ delete_batched_background_migration(MIGRATION, :vulnerability_statistics, :project_id, [])
+
+ queue_batched_background_migration(
+ MIGRATION,
+ :vulnerability_statistics,
+ :project_id,
+ job_interval: DELAY_INTERVAL,
+ batch_size: BATCH_SIZE,
+ sub_batch_size: SUB_BATCH_SIZE
+ )
+ end
+
+ def down
+ delete_batched_background_migration(MIGRATION, :vulnerability_statistics, :project_id, [])
+ end
+end
diff --git a/db/schema_migrations/20250603110954 b/db/schema_migrations/20250603110954
new file mode 100644
index 00000000000..09e21476202
--- /dev/null
+++ b/db/schema_migrations/20250603110954
@@ -0,0 +1 @@
+fbac3349407a71807fd63f0acd3b7b29b80d3a91fca93d4117955d9617662297
\ No newline at end of file
diff --git a/doc/ci/runners/hosted_runners/linux.md b/doc/ci/runners/hosted_runners/linux.md
index 94907620983..83c1b8fb055 100644
--- a/doc/ci/runners/hosted_runners/linux.md
+++ b/doc/ci/runners/hosted_runners/linux.md
@@ -24,23 +24,99 @@ The machine type and underlying processor type might change. Jobs optimized for
GitLab offers the following machine types for hosted runners on Linux x86-64.
-| Runner Tag | vCPUs | Memory | Storage |
-|--------------------------------------------------------|-------|--------|---------|
-| `saas-linux-small-amd64` (default) | 2 | 8 GB | 30 GB |
-| `saas-linux-medium-amd64` | 4 | 16 GB | 50 GB |
-| `saas-linux-large-amd64` (Premium and Ultimate only) | 8 | 32 GB | 100 GB |
-| `saas-linux-xlarge-amd64` (Premium and Ultimate only) | 16 | 64 GB | 200 GB |
-| `saas-linux-2xlarge-amd64` (Premium and Ultimate only) | 32 | 128 GB | 200 GB |
+<table>
+  <thead>
+    <tr>
+      <th>Runner Tag</th>
+      <th>vCPUs</th>
+      <th>Memory</th>
+      <th>Storage</th>
+    </tr>
+  </thead>
+  <tbody>
+    <tr>
+      <td><code>saas-linux-small-amd64</code> (default)</td>
+      <td>2</td>
+      <td>8 GB</td>
+      <td>30 GB</td>
+    </tr>
+    <tr>
+      <td><code>saas-linux-medium-amd64</code></td>
+      <td>4</td>
+      <td>16 GB</td>
+      <td>50 GB</td>
+    </tr>
+    <tr>
+      <td><code>saas-linux-large-amd64</code> (Premium and Ultimate only)</td>
+      <td>8</td>
+      <td>32 GB</td>
+      <td>100 GB</td>
+    </tr>
+    <tr>
+      <td><code>saas-linux-xlarge-amd64</code> (Premium and Ultimate only)</td>
+      <td>16</td>
+      <td>64 GB</td>
+      <td>200 GB</td>
+    </tr>
+    <tr>
+      <td><code>saas-linux-2xlarge-amd64</code> (Premium and Ultimate only)</td>
+      <td>32</td>
+      <td>128 GB</td>
+      <td>200 GB</td>
+    </tr>
+  </tbody>
+</table>
+
## Machine types available for Linux - Arm64
GitLab offers the following machine type for hosted runners on Linux Arm64.
-| Runner Tag | vCPUs | Memory | Storage |
-|-------------------------------------------------------|-------|--------|---------|
-| `saas-linux-small-arm64` | 2 | 8 GB | 30 GB |
-| `saas-linux-medium-arm64` (Premium and Ultimate only) | 4 | 16 GB | 50 GB |
-| `saas-linux-large-arm64` (Premium and Ultimate only) | 8 | 32 GB | 100 GB |
+<table>
+  <thead>
+    <tr>
+      <th>Runner Tag</th>
+      <th>vCPUs</th>
+      <th>Memory</th>
+      <th>Storage</th>
+    </tr>
+  </thead>
+  <tbody>
+    <tr>
+      <td><code>saas-linux-small-arm64</code></td>
+      <td>2</td>
+      <td>8 GB</td>
+      <td>30 GB</td>
+    </tr>
+    <tr>
+      <td><code>saas-linux-medium-arm64</code> (Premium and Ultimate only)</td>
+      <td>4</td>
+      <td>16 GB</td>
+      <td>50 GB</td>
+    </tr>
+    <tr>
+      <td><code>saas-linux-large-arm64</code> (Premium and Ultimate only)</td>
+      <td>8</td>
+      <td>32 GB</td>
+      <td>100 GB</td>
+    </tr>
+  </tbody>
+</table>
+
{{< alert type="note" >}}
diff --git a/doc/user/application_security/sast/_index.md b/doc/user/application_security/sast/_index.md
index 100996fde1c..705cf68707e 100644
--- a/doc/user/application_security/sast/_index.md
+++ b/doc/user/application_security/sast/_index.md
@@ -812,28 +812,28 @@ The following are Docker image-related CI/CD variables.
Some analyzers can be customized with CI/CD variables.
-| CI/CD variable | Analyzer | Description |
-|-------------------------------------|----------------------|-------------|
-| `GITLAB_ADVANCED_SAST_ENABLED` | GitLab Advanced SAST | Set to `true` to enable [GitLab Advanced SAST](gitlab_advanced_sast.md) scanning (available in GitLab Ultimate only). Default: `false`. |
-| `SCAN_KUBERNETES_MANIFESTS` | Kubesec | Set to `"true"` to scan Kubernetes manifests. |
-| `KUBESEC_HELM_CHARTS_PATH` | Kubesec | Optional path to Helm charts that `helm` uses to generate a Kubernetes manifest that `kubesec` scans. If dependencies are defined, `helm dependency build` should be ran in a `before_script` to fetch the necessary dependencies. |
-| `KUBESEC_HELM_OPTIONS` | Kubesec | Additional arguments for the `helm` executable. |
-| `COMPILE` | SpotBugs | Set to `false` to disable project compilation and dependency fetching. |
-| `ANT_HOME` | SpotBugs | The `ANT_HOME` variable. |
-| `ANT_PATH` | SpotBugs | Path to the `ant` executable. |
-| `GRADLE_PATH` | SpotBugs | Path to the `gradle` executable. |
-| `JAVA_OPTS` | SpotBugs | Additional arguments for the `java` executable. |
-| `JAVA_PATH` | SpotBugs | Path to the `java` executable. |
-| `SAST_JAVA_VERSION` | SpotBugs | Which Java version to use. [Starting in GitLab 15.0](https://gitlab.com/gitlab-org/gitlab/-/issues/352549), supported versions are `11` and `17` (default). Before GitLab 15.0, supported versions are `8` (default) and `11`. |
-| `MAVEN_CLI_OPTS` | SpotBugs | Additional arguments for the `mvn` or `mvnw` executable. |
-| `MAVEN_PATH` | SpotBugs | Path to the `mvn` executable. |
-| `MAVEN_REPO_PATH` | SpotBugs | Path to the Maven local repository (shortcut for the `maven.repo.local` property). |
-| `SBT_PATH` | SpotBugs | Path to the `sbt` executable. |
-| `FAIL_NEVER` | SpotBugs | Set to `1` to ignore compilation failure. |
-| `SAST_SEMGREP_METRICS` | Semgrep | Set to `"false"` to disable sending anonymized scan metrics to [r2c](https://semgrep.dev). Default: `true`. |
-| `SAST_SCANNER_ALLOWED_CLI_OPTS` | Semgrep | CLI options (arguments with value, or flags) that are passed to the underlying security scanner when running scan operation. Only a limited set of [options](#security-scanner-configuration) are accepted. Separate a CLI option and its value using either a blank space or equals (`=`) character. For example: `name1 value1` or `name1=value1`. Multiple options must be separated by blank spaces. For example: `name1 value1 name2 value2`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/368565) in GitLab 15.3. |
-| `SAST_RULESET_GIT_REFERENCE` | All | Defines a path to a custom ruleset configuration. If a project has a `.gitlab/sast-ruleset.toml` file committed, that local configuration takes precedence and the file from `SAST_RULESET_GIT_REFERENCE` isn't used. This variable is available for the Ultimate tier only. |
-| `SECURE_ENABLE_LOCAL_CONFIGURATION` | All | Enables the option to use custom ruleset configuration. If `SECURE_ENABLE_LOCAL_CONFIGURATION` is set to `false`, the project's custom ruleset configuration file at `.gitlab/sast-ruleset.toml` is ignored and the file from `SAST_RULESET_GIT_REFERENCE` or the default configuration takes precedence. |
+| CI/CD variable | Analyzer | Default | Description |
+|-------------------------------------|----------------------|-------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `GITLAB_ADVANCED_SAST_ENABLED` | GitLab Advanced SAST | `false` | Set to `true` to enable [GitLab Advanced SAST](gitlab_advanced_sast.md) scanning (available in GitLab Ultimate only). |
+| `SCAN_KUBERNETES_MANIFESTS` | Kubesec | `"false"` | Set to `"true"` to scan Kubernetes manifests. |
+| `KUBESEC_HELM_CHARTS_PATH` | Kubesec | | Optional path to Helm charts that `helm` uses to generate a Kubernetes manifest that `kubesec` scans. If dependencies are defined, `helm dependency build` should be run in a `before_script` to fetch the necessary dependencies. |
+| `KUBESEC_HELM_OPTIONS` | Kubesec | | Additional arguments for the `helm` executable. |
+| `COMPILE` | SpotBugs | `true` | Set to `false` to disable project compilation and dependency fetching. |
+| `ANT_HOME` | SpotBugs | | The `ANT_HOME` variable. |
+| `ANT_PATH` | SpotBugs | `ant` | Path to the `ant` executable. |
+| `GRADLE_PATH` | SpotBugs | `gradle` | Path to the `gradle` executable. |
+| `JAVA_OPTS` | SpotBugs | `-XX:MaxRAMPercentage=80` | Additional arguments for the `java` executable. |
+| `JAVA_PATH` | SpotBugs | `java` | Path to the `java` executable. |
+| `SAST_JAVA_VERSION` | SpotBugs | `8` for GitLab < 15.0; `17` for GitLab >= 15.0 | Which Java version to use. [Starting in GitLab 15.0](https://gitlab.com/gitlab-org/gitlab/-/issues/352549), supported versions are `11` and `17`. Before GitLab 15.0, supported versions are `8` and `11`. |
+| `MAVEN_CLI_OPTS` | SpotBugs | `--batch-mode -DskipTests=true` | Additional arguments for the `mvn` or `mvnw` executable. |
+| `MAVEN_PATH` | SpotBugs | `mvn` | Path to the `mvn` executable. |
+| `MAVEN_REPO_PATH` | SpotBugs | `$HOME/.m2/repository` | Path to the Maven local repository (shortcut for the `maven.repo.local` property). |
+| `SBT_PATH` | SpotBugs | `sbt` | Path to the `sbt` executable. |
+| `FAIL_NEVER` | SpotBugs | `false` | Set to `true` or `1` to ignore compilation failure. |
+| `SAST_SEMGREP_METRICS` | Semgrep | `true` | Set to `false` to disable sending anonymized scan metrics to [r2c](https://semgrep.dev). |
+| `SAST_SCANNER_ALLOWED_CLI_OPTS` | Semgrep | `--max-target-bytes=1000000 --timeout=5` | CLI options (arguments with value, or flags) that are passed to the underlying security scanner when running scan operation. Only a limited set of [options](#security-scanner-configuration) are accepted. Separate a CLI option and its value using either a blank space or equals (`=`) character. For example: `name1 value1` or `name1=value1`. Multiple options must be separated by blank spaces. For example: `name1 value1 name2 value2`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/368565) in GitLab 15.3. |
+| `SAST_RULESET_GIT_REFERENCE` | All | | Defines a path to a custom ruleset configuration. If a project has a `.gitlab/sast-ruleset.toml` file committed, that local configuration takes precedence and the file from `SAST_RULESET_GIT_REFERENCE` isn't used. This variable is available for the Ultimate tier only. |
+| `SECURE_ENABLE_LOCAL_CONFIGURATION` | All | `false` | Enables the option to use custom ruleset configuration. If `SECURE_ENABLE_LOCAL_CONFIGURATION` is set to `false`, the project's custom ruleset configuration file at `.gitlab/sast-ruleset.toml` is ignored and the file from `SAST_RULESET_GIT_REFERENCE` or the default configuration takes precedence. |
#### Security scanner configuration
diff --git a/doc/user/application_security/sast/gitlab_advanced_sast.md b/doc/user/application_security/sast/gitlab_advanced_sast.md
index 8b164445156..7aed6c0fb84 100644
--- a/doc/user/application_security/sast/gitlab_advanced_sast.md
+++ b/doc/user/application_security/sast/gitlab_advanced_sast.md
@@ -134,10 +134,10 @@ variables.
GitLab Advanced SAST can be configured using the following CI/CD variables.
-| **CI/CD variable** | **Description** |
-|--------------------------------|-------------------------------------------------------------------------------------------------|
-| `GITLAB_ADVANCED_SAST_ENABLED` | Set to `true` to enable GitLab Advanced SAST scanning, or `false` to disable. Default: `false`. |
-| `FF_GLAS_ENABLE_PHP_SUPPORT` | Set to `true` to analyze PHP files, or false to disable. Default: `true`. |
+| CI/CD variable | Default | Description |
+|--------------------------------|---------|-------------------------------------------------------------------------------|
+| `GITLAB_ADVANCED_SAST_ENABLED` | `false` | Set to `true` to enable GitLab Advanced SAST scanning, or `false` to disable. |
+| `FF_GLAS_ENABLE_PHP_SUPPORT` | `true` | Set to `true` to analyze PHP files, or `false` to disable. |
### Requirements
diff --git a/doc/user/duo_workflow/_index.md b/doc/user/duo_workflow/_index.md
index 7cfbb7ec516..aec3a052db6 100644
--- a/doc/user/duo_workflow/_index.md
+++ b/doc/user/duo_workflow/_index.md
@@ -108,9 +108,9 @@ Now you can use Workflow to help solve your coding tasks.
To use Workflow in VS Code:
-1. Open the command palette:
- - On macOS: Cmd + Shift + P
- - On Windows and Linux: Ctrl + P.
+1. Open the Command Palette:
+ - On macOS: Cmd+Shift+P.
+ - On Windows and Linux: Ctrl+Shift+P.
1. Type `GitLab Duo Workflow` and select **GitLab: Show Duo Workflow**.
1. In the text box, specify a code task in detail.
- For assistance writing your prompt, see [use case examples](use_cases.md) and [best practices](best_practices.md).
diff --git a/lib/gitlab/gon_helper.rb b/lib/gitlab/gon_helper.rb
index cf25236b335..4a9b2bb4c56 100644
--- a/lib/gitlab/gon_helper.rb
+++ b/lib/gitlab/gon_helper.rb
@@ -92,6 +92,7 @@ module Gitlab
push_frontend_feature_flag(:new_project_creation_form, current_user, type: :wip)
push_frontend_feature_flag(:work_items_client_side_boards, current_user)
push_frontend_feature_flag(:glql_work_items, current_user, type: :wip)
+ push_frontend_feature_flag(:show_date_when_relative_time_over_a_year, current_user, type: :gitlab_com_derisk)
end
# Exposes the state of a feature flag to the frontend code.
diff --git a/lib/gitlab/import_export/project/object_builder.rb b/lib/gitlab/import_export/project/object_builder.rb
index 5847ac374b0..71a0ec6339e 100644
--- a/lib/gitlab/import_export/project/object_builder.rb
+++ b/lib/gitlab/import_export/project/object_builder.rb
@@ -101,6 +101,10 @@ module Gitlab
author = row.delete('commit_author')
committer = row.delete('committer')
+ # We temporarily add 'project' to the attributes for DiffCommitUser processing,
+ # but MergeRequestDiffCommit doesn't have a project_id column, so we remove it
+ row.delete('project')
+
row['commit_author'] = author ||
find_or_create_diff_commit_user(aname, amail)
@@ -112,7 +116,12 @@ module Gitlab
def find_or_create_diff_commit_user(name, email)
find_with_cache([MergeRequest::DiffCommitUser, name, email]) do
- MergeRequest::DiffCommitUser.find_or_create(name, email)
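+ # Pass the project's organization so newly created rows are scoped to it when the feature flag is enabled.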
+ MergeRequest::DiffCommitUser.find_or_create(
+ name,
+ email,
+ project.organization_id,
+ with_organization: Feature.enabled?(:add_organization_to_diff_commit_users, project)
+ )
end
end
diff --git a/lib/gitlab/import_export/project/relation_factory.rb b/lib/gitlab/import_export/project/relation_factory.rb
index d9129c35bf1..c38d1e5a9f6 100644
--- a/lib/gitlab/import_export/project/relation_factory.rb
+++ b/lib/gitlab/import_export/project/relation_factory.rb
@@ -266,6 +266,25 @@ module Gitlab
@relation_hash['namespace_id'] = @importable.project_namespace_id
end
+ def find_or_create_object!
+ return unique_relation_object if unique_relation?
+
+ # Can't use IDs because validations call the `group` or `project` associations
+ finder_hash = parsed_relation_hash.tap do |hash|
+ if relation_class.attribute_method?('group_id') && @importable.is_a?(::Project)
+ hash['group'] = @importable.group
+ end
+
+ # Add project for DiffCommitUser to determine organization_id
+ hash['project'] = @importable if @relation_name == :'MergeRequest::DiffCommitUser'
+
+ hash[importable_class_name] = @importable if relation_class.reflect_on_association(importable_class_name.to_sym)
+ hash.delete(importable_column_name)
+ end
+
+ @object_builder.build(relation_class, finder_hash)
+ end
+
def compute_relative_position
return unless max_relative_position
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index d5baa40c991..86ed9471a22 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -35770,6 +35770,9 @@ msgstr[1] ""
msgid "Label %{labelName} was not found"
msgstr ""
+msgid "Label '%{command_label}' for command id '%{command}' must not start with '%{prefix}'"
+msgstr ""
+
msgid "Label actions dropdown"
msgstr ""
diff --git a/spec/controllers/concerns/strong_pagination_params_spec.rb b/spec/controllers/concerns/strong_pagination_params_spec.rb
index 18730b08c9a..a474ca8fe51 100644
--- a/spec/controllers/concerns/strong_pagination_params_spec.rb
+++ b/spec/controllers/concerns/strong_pagination_params_spec.rb
@@ -14,11 +14,9 @@ RSpec.describe StrongPaginationParams, feature_category: :tooling do
subject(:controller) { controller_class.new }
it 'returns an empty hash if params are not present' do
- allow(controller).to receive(:params) do
- ActionController::Parameters.new({})
- end
+ allow(controller).to receive(:params) { ActionController::Parameters.new({}) }
- expect(controller.pagination_params).to eq({})
+ expect(controller.pagination_params).to eq(ActionController::Parameters.new({}).permit!)
end
it 'cleans up any params that are not allowed / relevant' do
diff --git a/spec/factories/merge_request_diff_commit_users.rb b/spec/factories/merge_request_diff_commit_users.rb
index 94bd358454c..29b1443ecc7 100644
--- a/spec/factories/merge_request_diff_commit_users.rb
+++ b/spec/factories/merge_request_diff_commit_users.rb
@@ -4,5 +4,6 @@ FactoryBot.define do
factory :merge_request_diff_commit_user, class: 'MergeRequest::DiffCommitUser' do
name { generate(:name) }
email { generate(:email) }
+ organization_id { create(:organization).id }
end
end
diff --git a/spec/features/boards/board_filters_spec.rb b/spec/features/boards/board_filters_spec.rb
index 0609ad45e9c..0260aa1ec04 100644
--- a/spec/features/boards/board_filters_spec.rb
+++ b/spec/features/boards/board_filters_spec.rb
@@ -179,7 +179,10 @@ RSpec.describe 'Issue board filters', :js, feature_category: :team_planning do
expect_filtered_search_dropdown_results(filter_dropdown, 3)
- click_on 'Incident'
+ within_testid('filtered-search-input') do
+ click_on 'Incident'
+ end
+
filter_submit.click
expect(find('[data-testid="board-list"]:nth-child(1)')).to have_selector('.board-card', count: 1)
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index 85f4956092b..ebbfc5dc3a9 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -48,6 +48,7 @@ exports[`Design management list item component with notes renders item with mult
@@ -111,6 +112,7 @@ exports[`Design management list item component with notes renders item with sing
diff --git a/spec/frontend/lib/utils/datetime/timeago_utility_spec.js b/spec/frontend/lib/utils/datetime/timeago_utility_spec.js
index b633831a7fe..faee9eee0f9 100644
--- a/spec/frontend/lib/utils/datetime/timeago_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime/timeago_utility_spec.js
@@ -48,6 +48,24 @@ describe('TimeAgo utils', () => {
])('formats date `%p` as `%p`', (date, result) => {
expect(getTimeago().format(date)).toEqual(result);
});
+
+ describe('when dates are over a year and showDateWhenRelativeTimeOverAYear is true', () => {
+ beforeEach(() => {
+ window.gon.features = { showDateWhenRelativeTimeOverAYear: true };
+ });
+
+ it.each([
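+ // Offsets are in milliseconds: 60e3 ms * 60 * 24 is one day.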
+ [new Date().getTime() + 60e3 * 60 * 24 * 365],
+ [new Date().getTime() + 60e3 * 60 * 24 * 366],
+ [new Date().getTime() + 60e3 * 60 * 24 * 366 * 2],
+ [new Date().getTime() - 60e3 * 60 * 24 * 366],
+ [new Date().getTime() - 60e3 * 60 * 24 * 366 * 2],
+ ])('formats date `%p` as date', (date) => {
+ expect(getTimeago().format(date)).toEqual(
+ localeDateFormat[DATE_ONLY_FORMAT].format(date),
+ );
+ });
+ });
});
describe('with User Setting timeDisplayRelative: false', () => {
diff --git a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
index 21c58d662e3..0446f1d6c45 100644
--- a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
+++ b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
@@ -4,7 +4,7 @@ import { GlTruncate } from '@gitlab/ui';
import timezoneMock from 'timezone-mock';
import { getTimeago } from '~/lib/utils/datetime_utility';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import { DATE_ONLY_FORMAT } from '~/lib/utils/datetime/locale_dateformat';
+import { DATE_ONLY_FORMAT, localeDateFormat } from '~/lib/utils/datetime/locale_dateformat';
describe('Time ago with tooltip component', () => {
let vm;
@@ -89,6 +89,30 @@ describe('Time ago with tooltip component', () => {
});
});
+ describe('when gon.features.showDateWhenRelativeTimeOverAYear is true', () => {
+ beforeEach(() => {
+ window.gon = { features: { showDateWhenRelativeTimeOverAYear: true } };
+ });
+
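+ // 366 days in the past, just over the one-year threshold.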
+ const timestampOneYearAgo = new Date().getTime() - 60e3 * 60 * 24 * 366;
+
+ describe('with showDateWhenOverAYear: false', () => {
+ it('should render the relative time', () => {
+ buildVm({ time: timestampOneYearAgo, showDateWhenOverAYear: false });
+
+ expect(vm.text()).toEqual('1 year ago');
+ });
+ });
+
+ describe('with showDateWhenOverAYear: true', () => {
+ it('should render the date', () => {
+ buildVm({ time: timestampOneYearAgo, showDateWhenOverAYear: true });
+
+ expect(vm.text()).toEqual(localeDateFormat[DATE_ONLY_FORMAT].format(timestampOneYearAgo));
+ });
+ });
+ });
+
describe('number based timestamps', () => {
// Store a date object before we mock the TZ
const date = new Date();
diff --git a/spec/frontend/work_items/components/design_management/__snapshots__/design_item_spec.js.snap b/spec/frontend/work_items/components/design_management/__snapshots__/design_item_spec.js.snap
index fd73d60988d..1032132f6cf 100644
--- a/spec/frontend/work_items/components/design_management/__snapshots__/design_item_spec.js.snap
+++ b/spec/frontend/work_items/components/design_management/__snapshots__/design_item_spec.js.snap
@@ -49,6 +49,7 @@ exports[`Design item component with notes renders item with multiple comments 1`
@@ -115,6 +116,7 @@ exports[`Design item component with notes renders item with single comment 1`] =
diff --git a/spec/frontend/work_items/components/work_item_type_icon_spec.js b/spec/frontend/work_items/components/work_item_type_icon_spec.js
index 3a30d753283..564fe501170 100644
--- a/spec/frontend/work_items/components/work_item_type_icon_spec.js
+++ b/spec/frontend/work_items/components/work_item_type_icon_spec.js
@@ -1,12 +1,12 @@
import { GlIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
let wrapper;
function createComponent(propsData) {
- wrapper = shallowMount(WorkItemTypeIcon, {
+ wrapper = shallowMountExtended(WorkItemTypeIcon, {
propsData,
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
@@ -16,6 +16,7 @@ function createComponent(propsData) {
describe('Work Item type component', () => {
const findIcon = () => wrapper.findComponent(GlIcon);
+ const findButton = () => wrapper.findByTestId('work-item-type-icon');
describe.each`
workItemType | iconName | text | showTooltipOnHover | iconVariant
@@ -52,7 +53,7 @@ describe('Work Item type component', () => {
});
it('shows tooltip on hover when props passed', () => {
- const tooltip = getBinding(findIcon().element, 'gl-tooltip');
+ const tooltip = getBinding(findButton().element, 'gl-tooltip');
expect(tooltip.value).toBe(showTooltipOnHover);
});
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index 5565d99b166..288a7041d68 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -498,7 +498,7 @@ RSpec.describe DiffHelper, feature_category: :code_review_workflow do
end
it "filters with safe_params" do
- expect(helper.params_with_whitespace).to eq({ 'w' => 1 })
+ expect(helper.params_with_whitespace.to_h).to eq({ 'w' => 1 })
end
end
diff --git a/spec/lib/gitlab/import_export/project/object_builder_spec.rb b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
index 20e176bf6fd..9249e49c309 100644
--- a/spec/lib/gitlab/import_export/project/object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/project/object_builder_spec.rb
@@ -222,12 +222,13 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
context 'merge request diff commit users' do
it 'finds the existing user' do
- user = MergeRequest::DiffCommitUser.find_or_create('Alice', 'alice@example.com')
+ user = MergeRequest::DiffCommitUser.find_or_create('Alice', 'alice@example.com', project.organization_id)
found = described_class.build(
MergeRequest::DiffCommitUser,
'name' => 'Alice',
- 'email' => 'alice@example.com'
+ 'email' => 'alice@example.com',
+ 'project' => project
)
expect(found).to eq(user)
@@ -237,7 +238,8 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
found = described_class.build(
MergeRequest::DiffCommitUser,
'name' => 'Alice',
- 'email' => 'alice@example.com'
+ 'email' => 'alice@example.com',
+ 'project' => project
)
expect(found.name).to eq('Alice')
@@ -249,7 +251,7 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
context 'when the "committer" object is present' do
it 'uses this object as the committer' do
user = MergeRequest::DiffCommitUser
- .find_or_create('Alice', 'alice@example.com')
+ .find_or_create('Alice', 'alice@example.com', project.organization_id)
commit = described_class.build(
MergeRequestDiffCommit,
@@ -258,7 +260,8 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
'committer_name' => 'Bla',
'committer_email' => 'bla@example.com',
'author_name' => 'Bla',
- 'author_email' => 'bla@example.com'
+ 'author_email' => 'bla@example.com',
+ 'project' => project
}
)
@@ -274,7 +277,8 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
'committer_name' => 'Alice',
'committer_email' => 'alice@example.com',
'author_name' => 'Alice',
- 'author_email' => 'alice@example.com'
+ 'author_email' => 'alice@example.com',
+ 'project' => project
}
)
@@ -286,7 +290,7 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
context 'when the "commit_author" object is present' do
it 'uses this object as the author' do
user = MergeRequest::DiffCommitUser
- .find_or_create('Alice', 'alice@example.com')
+ .find_or_create('Alice', 'alice@example.com', project.organization_id)
commit = described_class.build(
MergeRequestDiffCommit,
@@ -295,7 +299,8 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
'committer_email' => 'alice@example.com',
'commit_author' => user,
'author_name' => 'Bla',
- 'author_email' => 'bla@example.com'
+ 'author_email' => 'bla@example.com',
+ 'project' => project
}
)
@@ -311,7 +316,8 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
'committer_name' => 'Alice',
'committer_email' => 'alice@example.com',
'author_name' => 'Alice',
- 'author_email' => 'alice@example.com'
+ 'author_email' => 'alice@example.com',
+ 'project' => project
}
)
@@ -325,10 +331,10 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
context 'when the user already exists' do
it 'returns the existing user' do
user = MergeRequest::DiffCommitUser
- .find_or_create('Alice', 'alice@example.com')
+ .find_or_create('Alice', 'alice@example.com', project.organization_id)
found = described_class
- .new(MergeRequestDiffCommit, {})
+ .new(MergeRequestDiffCommit, { 'project' => project })
.send(:find_or_create_diff_commit_user, user.name, user.email)
expect(found).to eq(user)
@@ -338,7 +344,7 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
context 'when the user does not exist' do
it 'creates the user' do
found = described_class
- .new(MergeRequestDiffCommit, {})
+ .new(MergeRequestDiffCommit, { 'project' => project })
.send(:find_or_create_diff_commit_user, 'Alice', 'alice@example.com')
expect(found.name).to eq('Alice')
@@ -347,7 +353,7 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do
end
it 'caches the results' do
- builder = described_class.new(MergeRequestDiffCommit, {})
+ builder = described_class.new(MergeRequestDiffCommit, { 'project' => project })
builder.send(:find_or_create_diff_commit_user, 'Alice', 'alice@example.com')
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 4d7512aed7c..df0326bc23c 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -732,4 +732,29 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
end
end
end
+
+ describe 'MergeRequest::DiffCommitUser' do
+ let(:relation_sym) { :'MergeRequest::DiffCommitUser' }
+ let(:relation_hash) do
+ {
+ 'name' => 'Test Author',
+ 'email' => 'test@example.com'
+ }
+ end
+
+ it 'creates a DiffCommitUser object' do
+ expect(created_object).to be_a(MergeRequest::DiffCommitUser)
+ expect(created_object.name).to eq('Test Author')
+ expect(created_object.email).to eq('test@example.com')
+ end
+
+ it 'passes project to ObjectBuilder' do
+ expect(Gitlab::ImportExport::Project::ObjectBuilder).to receive(:build).with(
+ MergeRequest::DiffCommitUser,
+ hash_including('project' => project)
+ ).and_call_original
+
+ created_object
+ end
+ end
end
diff --git a/spec/migrations/20250513225311_queue_backfill_analyzer_project_statuses_spec.rb b/spec/migrations/20250603110954_requeue_backfill_analyzer_project_statuses_spec.rb
similarity index 85%
rename from spec/migrations/20250513225311_queue_backfill_analyzer_project_statuses_spec.rb
rename to spec/migrations/20250603110954_requeue_backfill_analyzer_project_statuses_spec.rb
index 7c3accb952e..e4b71d78250 100644
--- a/spec/migrations/20250513225311_queue_backfill_analyzer_project_statuses_spec.rb
+++ b/spec/migrations/20250603110954_requeue_backfill_analyzer_project_statuses_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe QueueBackfillAnalyzerProjectStatuses, migration: :gitlab_sec, feature_category: :security_asset_inventories do
+RSpec.describe RequeueBackfillAnalyzerProjectStatuses, migration: :gitlab_sec, feature_category: :security_asset_inventories do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
diff --git a/spec/models/merge_request/diff_commit_user_spec.rb b/spec/models/merge_request/diff_commit_user_spec.rb
index 08e073568f9..7dc1b0255f8 100644
--- a/spec/models/merge_request/diff_commit_user_spec.rb
+++ b/spec/models/merge_request/diff_commit_user_spec.rb
@@ -2,31 +2,38 @@
require 'spec_helper'
-RSpec.describe MergeRequest::DiffCommitUser do
+RSpec.describe MergeRequest::DiffCommitUser, feature_category: :code_review_workflow do
+ let_it_be(:organization_id) { create(:organization).id }
+
describe 'validations' do
it 'requires that names are less than 512 characters long' do
- expect(described_class.new(name: 'a' * 1000)).not_to be_valid
+ user = build(:merge_request_diff_commit_user, name: 'a' * 1000)
+ expect(user).not_to be_valid
end
it 'requires that Emails are less than 512 characters long' do
- expect(described_class.new(email: 'a' * 1000)).not_to be_valid
+ user = build(:merge_request_diff_commit_user, email: 'a' * 1000)
+ expect(user).not_to be_valid
end
it 'requires either a name or Email' do
- expect(described_class.new).not_to be_valid
+ user = build(:merge_request_diff_commit_user, name: nil, email: nil)
+ expect(user).not_to be_valid
end
it 'allows setting of just a name' do
- expect(described_class.new(name: 'Alice')).to be_valid
+ user = build(:merge_request_diff_commit_user, email: nil)
+ expect(user).to be_valid
end
it 'allows setting of just an Email' do
- expect(described_class.new(email: 'alice@example.com')).to be_valid
+ user = build(:merge_request_diff_commit_user, name: nil)
+ expect(user).to be_valid
end
it 'allows setting of both a name and Email' do
- expect(described_class.new(name: 'Alice', email: 'alice@example.com'))
- .to be_valid
+ user = build(:merge_request_diff_commit_user)
+ expect(user).to be_valid
end
end
@@ -41,87 +48,314 @@ RSpec.describe MergeRequest::DiffCommitUser do
end
describe '.find_or_create' do
- it 'creates a new row if none exist' do
- alice = described_class.find_or_create('Alice', 'alice@example.com')
+ context 'when with_organization is true' do
+ it 'creates a new row if none exist' do
+ alice = described_class.find_or_create('Alice', 'alice@example.com', organization_id, with_organization: true)
+ expect(alice.name).to eq('Alice')
+ expect(alice.email).to eq('alice@example.com')
+ expect(alice.organization_id).to eq(organization_id)
+ end
- expect(alice.name).to eq('Alice')
- expect(alice.email).to eq('alice@example.com')
+ it 'returns an existing row if one exists' do
+ user1 = create(:merge_request_diff_commit_user, organization_id: organization_id)
+ user2 = described_class.find_or_create(user1.name, user1.email, organization_id, with_organization: true)
+ expect(user1).to eq(user2)
+ end
+
+ it 'updates organization_id if an existing record has nil organization_id' do
+ user_without_org = create(
+ :merge_request_diff_commit_user,
+ name: 'Alice',
+ email: 'alice@example.com',
+ organization_id: nil
+ )
+
+ updated_user = described_class.find_or_create('Alice', 'alice@example.com', organization_id,
+ with_organization: true)
+
+ expect(updated_user.id).to eq(user_without_org.id)
+ expect(updated_user.organization_id).to eq(organization_id)
+
+ user_without_org.reload
+ expect(user_without_org.organization_id).to eq(organization_id)
+ end
+
+ it 'handles concurrent inserts' do
+ user = create(:merge_request_diff_commit_user, organization_id: organization_id)
+
+ # Stub the initial lookups to return nil so we fall through to find_or_create_by!
+ expect(described_class)
+ .to receive(:find_by)
+ .with(name: user.name, email: user.email, organization_id: organization_id)
+ .and_return(nil)
+
+ # If organization_id is present, expect a second find_by
+ expect(described_class)
+ .to receive(:find_by)
+ .with(name: user.name, email: user.email, organization_id: nil)
+ .and_return(nil)
+
+ # Now expect find_or_create_by! to be called and raise the error
+ expect(described_class)
+ .to receive(:find_or_create_by!)
+ .ordered
+ .with(name: user.name, email: user.email, organization_id: organization_id)
+ .and_raise(ActiveRecord::RecordNotUnique)
+
+ # On retry, the first find_by should succeed
+ expect(described_class)
+ .to receive(:find_by)
+ .with(name: user.name, email: user.email, organization_id: organization_id)
+ .and_return(user)
+
+ expect(described_class.find_or_create(user.name, user.email, organization_id,
+ with_organization: true)).to eq(user)
+ end
end
- it 'returns an existing row if one exists' do
- user1 = create(:merge_request_diff_commit_user)
- user2 = described_class.find_or_create(user1.name, user1.email)
+ context 'when with_organization is false (default)' do
+ it 'creates a new row without organization_id' do
+ alice = described_class.find_or_create('Alice', 'alice@example.com', organization_id)
+ expect(alice.name).to eq('Alice')
+ expect(alice.email).to eq('alice@example.com')
+ expect(alice.organization_id).to be_nil
+ end
- expect(user1).to eq(user2)
+ it 'returns an existing row without considering organization_id' do
+ user1 = create(:merge_request_diff_commit_user, name: 'Bob', email: 'bob@example.com', organization_id: nil)
+ user2 = described_class.find_or_create('Bob', 'bob@example.com', organization_id)
+ expect(user1).to eq(user2)
+ end
+
+ it 'ignores organization_id parameter when with_organization is false' do
+ alice = described_class.find_or_create('Alice', 'alice@example.com', organization_id)
+ expect(alice.organization_id).to be_nil
+ end
+
+ it 'returns an existing row even if it has organization_id set' do
+ # Record created when the feature flag was enabled
+ user1 = create(:merge_request_diff_commit_user,
+ name: 'Charlie',
+ email: 'charlie@example.com',
+ organization_id: organization_id)
+
+ # Finding the record after the feature flag is disabled
+ user2 = described_class.find_or_create('Charlie', 'charlie@example.com', organization_id)
+
+ expect(user2).to eq(user1)
+ expect(user2.organization_id).to eq(organization_id)
+ end
+ end
+ end
+
+ describe '.bulk_find' do
+ context 'when with_organization is true' do
+ it 'finds records using organization_id' do
+ user = create(:merge_request_diff_commit_user, organization_id: organization_id)
+ non_matching_user = create(:merge_request_diff_commit_user, organization_id: create(:organization).id)
+
+ triples = [[user.name, user.email, organization_id]]
+
+ results = described_class.bulk_find(triples, with_organization: true)
+ expect(results).to include(user)
+ expect(results).not_to include(non_matching_user)
+ end
end
- it 'handles concurrent inserts' do
- user = create(:merge_request_diff_commit_user)
+ context 'when with_organization is false' do
+ it 'finds records without using organization_id' do
+ user = create(:merge_request_diff_commit_user, name: 'Alice', email: 'alice@example.com', organization_id: nil)
+ non_matching_user = create(:merge_request_diff_commit_user, name: 'Bob', email: 'bob@example.com')
- expect(described_class)
- .to receive(:find_or_create_by!)
- .ordered
- .with(name: user.name, email: user.email)
- .and_raise(ActiveRecord::RecordNotUnique)
+ pairs = [['Alice', 'alice@example.com', organization_id]] # organization_id is ignored
- expect(described_class)
- .to receive(:find_or_create_by!)
- .ordered
- .with(name: user.name, email: user.email)
- .and_return(user)
-
- expect(described_class.find_or_create(user.name, user.email)).to eq(user)
+ results = described_class.bulk_find(pairs)
+ expect(results).to include(user)
+ expect(results).not_to include(non_matching_user)
+ end
end
end
describe '.bulk_find_or_create' do
- it 'bulk creates missing rows and reuses existing rows' do
- bob = create(
- :merge_request_diff_commit_user,
- name: 'Bob',
- email: 'bob@example.com'
- )
+ context 'when with_organization is true' do
+ it 'bulk creates missing rows and reuses existing rows' do
+ bob = create(
+ :merge_request_diff_commit_user,
+ name: 'Bob',
+ email: 'bob@example.com',
+ organization_id: organization_id
+ )
- users = described_class.bulk_find_or_create(
- [%w[Alice alice@example.com], %w[Bob bob@example.com]]
- )
- alice = described_class.find_by(name: 'Alice')
+ triples = [
+ ['Alice', 'alice@example.com', organization_id],
+ ['Bob', 'bob@example.com', organization_id]
+ ]
- expect(users[%w[Alice alice@example.com]]).to eq(alice)
- expect(users[%w[Bob bob@example.com]]).to eq(bob)
+ users = described_class.bulk_find_or_create(triples, with_organization: true)
+
+ alice = described_class.find_by(name: 'Alice', email: 'alice@example.com', organization_id: organization_id)
+ expect(users[['Alice', 'alice@example.com', organization_id]]).to eq(alice)
+ expect(users[['Bob', 'bob@example.com', organization_id]]).to eq(bob)
+ expect(alice.organization_id).to eq(organization_id)
+ end
+
+ it 'does not insert any data when all users exist' do
+ bob = create(
+ :merge_request_diff_commit_user,
+ name: 'Bob',
+ email: 'bob@example.com',
+ organization_id: organization_id
+ )
+
+ triples = [['Bob', 'bob@example.com', organization_id]]
+
+ # Mock to verify insert_all isn't called
+ expect(described_class).not_to receive(:insert_all)
+
+ users = described_class.bulk_find_or_create(triples, with_organization: true)
+ expect(users[['Bob', 'bob@example.com', organization_id]]).to eq(bob)
+ end
+
+ it 'handles concurrently inserted rows' do
+ bob = create(
+ :merge_request_diff_commit_user,
+ name: 'Bob',
+ email: 'bob@example.com',
+ organization_id: organization_id
+ )
+
+ triples = [['Bob', 'bob@example.com', organization_id]]
+
+ # First call: initial bulk_find with with_organization: false
+ expect(described_class)
+ .to receive(:bulk_find)
+ .with(triples, with_organization: false)
+ .and_return([])
+
+ # Mock insert_all to return an empty array (simulating that a concurrent insert already happened)
+ expect(described_class)
+ .to receive(:insert_all)
+ .and_return([])
+
+ # Final call: checking for concurrent inserts with with_organization: true
+ expect(described_class)
+ .to receive(:bulk_find)
+ .with(triples, with_organization: true)
+ .and_return([bob])
+
+ users = described_class.bulk_find_or_create(triples, with_organization: true)
+ expect(users[['Bob', 'bob@example.com', organization_id]]).to eq(bob)
+ end
+
+ it 'assigns organization_id to all created records' do
+ triples = [
+ ['Alice', 'alice@example.com', organization_id],
+ ['Bob', 'bob@example.com', organization_id],
+ ['Charlie', 'charlie@example.com', organization_id]
+ ]
+
+ users = described_class.bulk_find_or_create(triples, with_organization: true)
+ expect(users.values.map(&:organization_id).uniq).to eq([organization_id])
+ end
+
+ it 'updates organization_id for existing records with nil organization_id' do
+ # Existing users without organization_id
+ alice_without_org = create(
+ :merge_request_diff_commit_user,
+ name: 'Alice',
+ email: 'alice@example.com',
+ organization_id: nil
+ )
+ bob_without_org = create(
+ :merge_request_diff_commit_user,
+ name: 'Bob',
+ email: 'bob@example.com',
+ organization_id: nil
+ )
+
+ triples = [
+ ['Alice', 'alice@example.com', organization_id],
+ ['Bob', 'bob@example.com', organization_id],
+ ['Charlie', 'charlie@example.com', organization_id] # New user
+ ]
+
+ users = described_class.bulk_find_or_create(triples, with_organization: true)
+
+ # Reload to get updated values
+ alice_without_org.reload
+ bob_without_org.reload
+
+ # Check that existing users were found and updated
+ expect(users[['Alice', 'alice@example.com', organization_id]]).to eq(alice_without_org)
+ expect(users[['Bob', 'bob@example.com', organization_id]]).to eq(bob_without_org)
+
+ # Check that organization_id was updated
+ expect(alice_without_org.organization_id).to eq(organization_id)
+ expect(bob_without_org.organization_id).to eq(organization_id)
+
+ # Check that new user was created with organization_id
+ charlie = users[['Charlie', 'charlie@example.com', organization_id]]
+ expect(charlie.organization_id).to eq(organization_id)
+ end
end
- it 'does not insert any data when all users exist' do
- bob = create(
- :merge_request_diff_commit_user,
- name: 'Bob',
- email: 'bob@example.com'
- )
+ context 'when with_organization is false (default)' do
+ it 'bulk creates missing rows without organization_id' do
+ bob = create(
+ :merge_request_diff_commit_user,
+ name: 'Bob',
+ email: 'bob@example.com',
+ organization_id: nil
+ )
- users = described_class.bulk_find_or_create([%w[Bob bob@example.com]])
+ # Joe was created while the feature flag was enabled
+ joe = create(
+ :merge_request_diff_commit_user,
+ name: 'Joe',
+ email: 'joe@example.com',
+ organization_id: organization_id
+ )
- expect(described_class).not_to receive(:insert_all)
- expect(users[%w[Bob bob@example.com]]).to eq(bob)
- end
+ pairs = [
+ ['Alice', 'alice@example.com'],
+ ['Bob', 'bob@example.com'],
+ ['Joe', 'joe@example.com']
+ ]
- it 'handles concurrently inserted rows' do
- bob = create(
- :merge_request_diff_commit_user,
- name: 'Bob',
- email: 'bob@example.com'
- )
+ users = described_class.bulk_find_or_create(pairs)
- input = [%w[Bob bob@example.com]]
+ alice = described_class.find_by(name: 'Alice', email: 'alice@example.com', organization_id: nil)
+ expect(users[['Alice', 'alice@example.com']]).to eq(alice)
+ expect(users[['Bob', 'bob@example.com']]).to eq(bob)
+ expect(users[['Joe', 'joe@example.com']]).to eq(joe)
+ expect(alice.organization_id).to be_nil
+ end
- expect(described_class)
- .to receive(:bulk_find)
- .twice
- .with(input)
- .and_return([], [bob])
+ it 'handles input with organization_id but ignores it' do
+ # Even if triples are passed, organization_id is ignored when with_organization is false
+ triples = [
+ ['Alice', 'alice@example.com', organization_id],
+ ['Bob', 'bob@example.com', organization_id]
+ ]
- users = described_class.bulk_find_or_create(input)
+ users = described_class.bulk_find_or_create(triples)
- expect(users[%w[Bob bob@example.com]]).to eq(bob)
+ alice = described_class.find_by(name: 'Alice', email: 'alice@example.com', organization_id: nil)
+ bob = described_class.find_by(name: 'Bob', email: 'bob@example.com', organization_id: nil)
+
+ expect(users[['Alice', 'alice@example.com']]).to eq(alice)
+ expect(users[['Bob', 'bob@example.com']]).to eq(bob)
+ expect(alice.organization_id).to be_nil
+ expect(bob.organization_id).to be_nil
+ end
+
+ it 'uses the legacy method internally' do
+ pairs = [['Alice', 'alice@example.com']]
+
+ expect(described_class).to receive(:bulk_find_or_create_legacy).with(pairs).and_call_original
+
+ described_class.bulk_find_or_create(pairs)
+ end
end
end
end
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index f44bb36ac3d..5755d1c0f3a 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe MergeRequestDiffCommit, feature_category: :code_review_workflow d
end
describe '.create_bulk' do
- subject { described_class.create_bulk(merge_request_diff_id, commits, skip_commit_data: skip_commit_data) }
+ subject { described_class.create_bulk(merge_request_diff_id, commits, project, skip_commit_data: skip_commit_data) }
let(:merge_request_diff_id) { merge_request.merge_request_diff.id }
let(:skip_commit_data) { false }
@@ -108,8 +108,7 @@ RSpec.describe MergeRequestDiffCommit, feature_category: :code_review_workflow d
it 'creates diff commit users' do
diff = create(:merge_request_diff, merge_request: merge_request)
-
- described_class.create_bulk(diff.id, [commits.first])
+ described_class.create_bulk(diff.id, [commits.first], project)
commit_row = described_class
.find_by(merge_request_diff_id: diff.id, relative_order: 0)
@@ -162,10 +161,60 @@ RSpec.describe MergeRequestDiffCommit, feature_category: :code_review_workflow d
subject
end
end
+
+ context 'with add_organization_to_diff_commit_users feature flag' do
+ let(:test_project) { create(:project) }
+ let(:test_diff) { create(:merge_request_diff) }
+ let(:organization_id) { test_project.organization_id }
+ let(:commits) do
+ [double(:commit, to_hash: {
+ id: 'test123',
+ author_name: 'Feature Test Author',
+ author_email: 'feature@test.com',
+ committer_name: 'Feature Test Committer',
+ committer_email: 'committer@test.com',
+ authored_date: Time.current,
+ committed_date: Time.current,
+ message: 'Test commit'
+ })]
+ end
+
+ context 'when enabled' do
+ it 'uses organization_id in hash lookup' do
+ users_hash = {
+ ['Feature Test Author', 'feature@test.com', organization_id] =>
+ instance_double(MergeRequest::DiffCommitUser, id: 1),
+ ['Feature Test Committer', 'committer@test.com', organization_id] =>
+ instance_double(MergeRequest::DiffCommitUser, id: 2)
+ }
+
+ allow(MergeRequest::DiffCommitUser).to receive(:bulk_find_or_create).and_return(users_hash)
+
+ expect { described_class.create_bulk(test_diff.id, commits, test_project) }.not_to raise_error
+ end
+ end
+
+ context 'when disabled' do
+ it 'uses name and email only in hash lookup' do
+ stub_feature_flags(add_organization_to_diff_commit_users: false)
+ users_hash = {
+ ['Feature Test Author', 'feature@test.com'] =>
+ instance_double(MergeRequest::DiffCommitUser, id: 1),
+ ['Feature Test Committer', 'committer@test.com'] =>
+ instance_double(MergeRequest::DiffCommitUser, id: 2)
+ }
+
+ allow(MergeRequest::DiffCommitUser).to receive(:bulk_find_or_create).and_return(users_hash)
+
+ expect { described_class.create_bulk(test_diff.id, commits, test_project) }.not_to raise_error
+ end
+ end
+ end
end
describe '.prepare_commits_for_bulk_insert' do
- it 'returns the commit hashes and unique user tuples' do
+ it 'returns the commit hashes and unique user triples' do
+ organization_id = create(:organization).id
commit = double(:commit, to_hash: {
parent_ids: %w[foo bar],
author_name: 'a' * 1000,
@@ -173,18 +222,15 @@ RSpec.describe MergeRequestDiffCommit, feature_category: :code_review_workflow d
committer_name: 'Alice',
committer_email: 'alice@example.com'
})
-
- hashes, tuples = described_class.prepare_commits_for_bulk_insert([commit])
-
+ hashes, triples = described_class.prepare_commits_for_bulk_insert([commit], organization_id)
expect(hashes).to eq([{
author_name: 'a' * 512,
author_email: 'a' * 512,
committer_name: 'Alice',
committer_email: 'alice@example.com'
}])
-
- expect(tuples)
- .to include(['a' * 512, 'a' * 512], %w[Alice alice@example.com])
+ expect(triples)
+ .to include(['a' * 512, 'a' * 512, organization_id], ['Alice', 'alice@example.com', organization_id])
end
end
end
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 57bba20db40..673ce49a668 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -58,6 +58,15 @@ RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
it { expect(subject.reload.patch_id_sha).to eq('f14ae956369247901117b8b7d237c9dc605898c5') }
it 'creates commits with empty messages' do
+ allow(MergeRequestDiffCommit).to receive(:create_bulk).and_call_original
+
expect(subject.commits).to all(have_attributes(message: ''))
+
+ expect(MergeRequestDiffCommit).to have_received(:create_bulk).with(
+ an_instance_of(Integer),
+ anything, # commits array
+ subject.project,
+ skip_commit_data: true
+ )
end
diff --git a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
index 0c0696ee3f2..a6055573c95 100644
--- a/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/container_registry_auth_service_shared_examples.rb
@@ -203,12 +203,8 @@ RSpec.shared_examples 'a container registry auth service' do
shared_examples 'returning tag name patterns when tag rules exist' do
context 'when the project has protection rules' do
- let(:push_delete_patterns_meta) { { 'tag_immutable_patterns' => %w[immutable1 immutable2] } }
-
- before do
- create(:container_registry_protection_tag_rule, project: project, tag_name_pattern: 'mutable')
- create(:container_registry_protection_tag_rule, :immutable, project: project, tag_name_pattern: 'immutable1')
- create(:container_registry_protection_tag_rule, :immutable, project: project, tag_name_pattern: 'immutable2')
+ before_all do
+ create(:container_registry_protection_tag_rule, project:)
end
it_behaves_like 'having the correct scope'
@@ -1656,149 +1652,6 @@ RSpec.shared_examples 'a container registry auth service' do
end
end
- describe '.tag_immutable_patterns' do
- let_it_be(:current_project) { create(:project) }
- let_it_be(:container_repository_path) { current_project.full_path }
-
- let(:current_user) { create(:user, developer_of: current_project) }
- let(:current_params) { { scopes: ["repository:#{container_repository_path}:push"] } }
-
- shared_examples 'not including tag_immutable_patterns' do
- it 'does not include tag_immutable_patterns' do
- is_expected.to include(:token)
- expect(payload['access']).not_to be_empty
- expect(payload['access'].first['meta']).not_to include('tag_immutable_patterns')
- end
- end
-
- shared_examples 'including tag_immutable_patterns' do
- it 'includes tag_immutable_patterns' do
- is_expected.to include(:token)
- expect(payload['access']).not_to be_empty
-
- expect(payload['access'].first['meta']).to include('tag_immutable_patterns')
-
- actual_patterns = payload['access'].first['meta']['tag_immutable_patterns']
- expect(actual_patterns).to match_array(%w[immutable1 immutable2])
- end
- end
-
- shared_examples 'returning an empty access field' do
- it 'returns an empty access field' do
- is_expected.to include(:token)
- expect(payload['access']).to be_empty
- end
- end
-
- context 'when there are no tag rules for immutability' do
- it_behaves_like 'not including tag_immutable_patterns'
- end
-
- context 'when there are tag rules for immutability' do
- using RSpec::Parameterized::TableSyntax
-
- before_all do
- create(:container_registry_protection_tag_rule,
- project: current_project,
- tag_name_pattern: 'not-included',
- minimum_access_level_for_push: :maintainer,
- minimum_access_level_for_delete: :maintainer
- )
- create(:container_registry_protection_tag_rule,
- :immutable,
- project: current_project,
- tag_name_pattern: 'immutable1'
- )
- create(:container_registry_protection_tag_rule,
- :immutable,
- project: current_project,
- tag_name_pattern: 'immutable2'
- )
- end
-
- context 'when feature container_registry_immutable_tags is disabled' do
- let(:current_params) { { scopes: ["repository:#{container_repository_path}:push"] } }
-
- before do
- stub_feature_flags(container_registry_immutable_tags: false)
- end
-
- it_behaves_like 'not including tag_immutable_patterns'
- end
-
- context 'when the actions do not include push, delete, or *' do
- let(:current_params) { { scopes: ["repository:#{container_repository_path}:pull"] } }
-
- it_behaves_like 'not including tag_immutable_patterns'
- end
-
- # rubocop:disable Layout/LineLength -- Avoid formatting to keep one-line table layout
- where(:user_role, :requested_scopes, :shared_example_name) do
- :developer | lazy { ["repository:#{container_repository_path}:pull"] } | 'not including tag_immutable_patterns'
- :developer | lazy { ["repository:#{container_repository_path}:push"] } | 'including tag_immutable_patterns'
- :developer | lazy { ["repository:#{container_repository_path}:delete"] } | 'returning an empty access field' # developers can't obtain delete access
- :developer | lazy { ["repository:#{container_repository_path}:pull,push"] } | 'including tag_immutable_patterns'
- :developer | lazy { ["repository:#{container_repository_path}:pull,delete"] } | 'not including tag_immutable_patterns'
- :developer | lazy { ["repository:#{container_repository_path}:push,delete"] } | 'including tag_immutable_patterns'
- :developer | lazy { ["repository:#{container_repository_path}:pull,push,delete"] } | 'including tag_immutable_patterns'
- :developer | lazy { ["repository:#{container_repository_path}:*"] } | 'returning an empty access field' # developers can't obtain full access
- :developer | lazy { ["repository:#{container_repository_path}:push,push"] } | 'including tag_immutable_patterns'
- :developer | lazy { ["repository:#{container_repository_path}:push,foo"] } | 'including tag_immutable_patterns'
- :maintainer | lazy { ["repository:#{container_repository_path}:pull"] } | 'not including tag_immutable_patterns'
- :maintainer | lazy { ["repository:#{container_repository_path}:push"] } | 'including tag_immutable_patterns'
- :maintainer | lazy { ["repository:#{container_repository_path}:delete"] } | 'including tag_immutable_patterns'
- :maintainer | lazy { ["repository:#{container_repository_path}:pull,push"] } | 'including tag_immutable_patterns'
- :maintainer | lazy { ["repository:#{container_repository_path}:pull,delete"] } | 'including tag_immutable_patterns'
- :maintainer | lazy { ["repository:#{container_repository_path}:push,delete"] } | 'including tag_immutable_patterns'
- :maintainer | lazy { ["repository:#{container_repository_path}:pull,push,delete"] } | 'including tag_immutable_patterns'
- :maintainer | lazy { ["repository:#{container_repository_path}:*"] } | 'including tag_immutable_patterns'
- :owner | lazy { ["repository:#{container_repository_path}:pull"] } | 'not including tag_immutable_patterns'
- :owner | lazy { ["repository:#{container_repository_path}:push"] } | 'including tag_immutable_patterns'
- :owner | lazy { ["repository:#{container_repository_path}:delete"] } | 'including tag_immutable_patterns'
- :owner | lazy { ["repository:#{container_repository_path}:pull,push"] } | 'including tag_immutable_patterns'
- :owner | lazy { ["repository:#{container_repository_path}:pull,delete"] } | 'including tag_immutable_patterns'
- :owner | lazy { ["repository:#{container_repository_path}:push,delete"] } | 'including tag_immutable_patterns'
- :owner | lazy { ["repository:#{container_repository_path}:pull,push,delete"] } | 'including tag_immutable_patterns'
- :owner | lazy { ["repository:#{container_repository_path}:*"] } | 'including tag_immutable_patterns'
- end
- # rubocop:enable Layout/LineLength
-
- with_them do
- let(:current_params) { { scopes: requested_scopes } }
-
- before do
- current_project.send(:"add_#{user_role}", current_user)
- end
-
- it_behaves_like params[:shared_example_name]
- end
-
- context 'when user is admin', :enable_admin_mode do
- let(:current_user) { build_stubbed(:admin) }
-
- where(:requested_scopes, :shared_example_name) do
- lazy { ["repository:#{container_repository_path}:push"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:delete"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:pull,push"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:pull,delete"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:push,delete"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:pull,push,delete"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:*"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:push,push"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:push,foo"] } | 'including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:pull"] } | 'not including tag_immutable_patterns'
- lazy { ["repository:#{container_repository_path}:pull,foo"] } | 'not including tag_immutable_patterns'
- end
-
- with_them do
- let(:current_params) { { scopes: requested_scopes } }
-
- it_behaves_like params[:shared_example_name]
- end
- end
- end
- end
-
def decode_user_info_from_payload(payload)
JWT.decode(payload["user"], nil, false)[0]["user_info"]
end
diff --git a/workhorse/_support/lint_last_known_acceptable.txt b/workhorse/_support/lint_last_known_acceptable.txt
index a01a35ca915..eb0e70b55ea 100644
--- a/workhorse/_support/lint_last_known_acceptable.txt
+++ b/workhorse/_support/lint_last_known_acceptable.txt
@@ -7,7 +7,8 @@ internal/api/channel_settings.go:57:28: G402: TLS MinVersion too low. (gosec)
internal/channel/channel.go:128:31: response body must be closed (bodyclose)
internal/config/config.go:247:18: G204: Subprocess launched with variable (gosec)
internal/config/config.go:339:8: G101: Potential hardcoded credentials (gosec)
-internal/dependencyproxy/dependencyproxy_test.go:572: internal/dependencyproxy/dependencyproxy_test.go:572: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "note that the timeout duration here is s..." (godox)
+internal/dependencyproxy/dependencyproxy.go:127: Function 'Inject' is too long (65 > 60) (funlen)
+internal/dependencyproxy/dependencyproxy_test.go:576: internal/dependencyproxy/dependencyproxy_test.go:576: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "note that the timeout duration here is s..." (godox)
internal/git/archive.go:67: Function 'Inject' has too many statements (55 > 40) (funlen)
internal/git/blob.go:21:5: exported: exported var SendBlob should have comment or be unexported (revive)
internal/git/diff.go:1: 1-47 lines are duplicate of `internal/git/format-patch.go:1-48` (dupl)
@@ -32,18 +33,18 @@ internal/git/upload-pack.go:37:16: Error return value of `cw.Flush` is not check
internal/git/upload-pack_test.go:72:2: error-is-as: use require.ErrorIs (testifylint)
internal/headers/headers.go:10: internal/headers/headers.go:10: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "Fixme: Go back to 512 bytes once https:/..." (godox)
internal/imageresizer/image_resizer.go:1:1: package-comments: should have a package comment (revive)
-internal/imageresizer/image_resizer.go:30:6: exported: exported type Resizer should have comment or be unexported (revive)
-internal/imageresizer/image_resizer.go:141:1: exported: exported function NewResizer should have comment or be unexported (revive)
-internal/imageresizer/image_resizer.go:149: Function 'Inject' is too long (65 > 60) (funlen)
-internal/imageresizer/image_resizer.go:172:30: Error return value of `imageFile.reader.Close` is not checked (errcheck)
-internal/imageresizer/image_resizer.go:174:35: G115: integer overflow conversion uint -> int (gosec)
-internal/imageresizer/image_resizer.go:195:32: Error return value of `command.KillProcessGroup` is not checked (errcheck)
-internal/imageresizer/image_resizer.go:256:28: G115: integer overflow conversion uint64 -> int64 (gosec)
-internal/imageresizer/image_resizer.go:264:41: G115: integer overflow conversion uint32 -> int32 (gosec)
-internal/imageresizer/image_resizer.go:306:46: G115: integer overflow conversion uint -> int (gosec)
-internal/imageresizer/image_resizer.go:350:17: Error return value of `res.Body.Close` is not checked (errcheck)
-internal/imageresizer/image_resizer.go:356:15: G304: Potential file inclusion via variable (gosec)
-internal/imageresizer/image_resizer.go:363:13: Error return value of `file.Close` is not checked (errcheck)
+internal/imageresizer/image_resizer.go:33:6: exported: exported type Resizer should have comment or be unexported (revive)
+internal/imageresizer/image_resizer.go:144:1: exported: exported function NewResizer should have comment or be unexported (revive)
+internal/imageresizer/image_resizer.go:152: Function 'Inject' is too long (72 > 60) (funlen)
+internal/imageresizer/image_resizer.go:182:30: Error return value of `imageFile.reader.Close` is not checked (errcheck)
+internal/imageresizer/image_resizer.go:184:35: G115: integer overflow conversion uint -> int (gosec)
+internal/imageresizer/image_resizer.go:205:32: Error return value of `command.KillProcessGroup` is not checked (errcheck)
+internal/imageresizer/image_resizer.go:266:28: G115: integer overflow conversion uint64 -> int64 (gosec)
+internal/imageresizer/image_resizer.go:274:41: G115: integer overflow conversion uint32 -> int32 (gosec)
+internal/imageresizer/image_resizer.go:316:46: G115: integer overflow conversion uint -> int (gosec)
+internal/imageresizer/image_resizer.go:356:17: Error return value of `res.Body.Close` is not checked (errcheck)
+internal/imageresizer/image_resizer.go:362:15: G304: Potential file inclusion via variable (gosec)
+internal/imageresizer/image_resizer.go:369:13: Error return value of `file.Close` is not checked (errcheck)
internal/imageresizer/image_resizer_caching.go:6:1: ST1000: at least one file in a package should have a package comment (stylecheck)
internal/proxy/proxy.go:142:14: SA6002: argument should be pointer-like to avoid allocations (staticcheck)
internal/senddata/contentprocessor/contentprocessor.go:136:35: response body must be closed (bodyclose)
@@ -52,9 +53,9 @@ internal/testhelper/gitaly.go:277: 277-296 lines are duplicate of `internal/test
internal/testhelper/gitaly.go:315: 315-336 lines are duplicate of `internal/testhelper/gitaly.go:338-357` (dupl)
internal/testhelper/gitaly.go:338: 338-357 lines are duplicate of `internal/testhelper/gitaly.go:277-296` (dupl)
internal/testhelper/testhelper.go:18:2: import 'github.com/dlclark/regexp2' is not allowed from list 'main' (depguard)
-internal/testhelper/testhelper.go:243:21: G302: Expect file permissions to be 0600 or less (gosec)
-internal/testhelper/testhelper.go:256:39: G115: integer overflow conversion uintptr -> int (gosec)
-internal/testhelper/testhelper.go:270:39: G115: integer overflow conversion uintptr -> int (gosec)
+internal/testhelper/testhelper.go:245:21: G302: Expect file permissions to be 0600 or less (gosec)
+internal/testhelper/testhelper.go:258:39: G115: integer overflow conversion uintptr -> int (gosec)
+internal/testhelper/testhelper.go:272:39: G115: integer overflow conversion uintptr -> int (gosec)
internal/transport/transport.go:147: Function 'validateIPAddress' is too long (77 > 60) (funlen)
internal/upload/artifacts_upload_test.go:49:1: cognitive complexity 32 of func `testArtifactsUploadServer` is high (> 20) (gocognit)
internal/upload/artifacts_uploader.go:82:11: G204: Subprocess launched with a potential tainted input or cmd arguments (gosec)
@@ -68,9 +69,9 @@ internal/upload/destination/objectstore/upload_strategy.go:29: internal/upload/d
internal/upload/destination/objectstore/uploader.go:5:2: G501: Blocklisted import crypto/md5: weak cryptographic primitive (gosec)
internal/upload/destination/objectstore/uploader.go:95:12: G401: Use of weak cryptographic primitive (gosec)
internal/upload/exif/exif.go:103:10: G204: Subprocess launched with variable (gosec)
-internal/upstream/routes.go:172:74: `(*upstream).wsRoute` - `matchers` always receives `nil` (unparam)
-internal/upstream/routes.go:232: Function 'configureRoutes' is too long (342 > 60) (funlen)
-internal/upstream/routes.go:490: internal/upstream/routes.go:490: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "TODO: We should probably not return a HT..." (godox)
+internal/upstream/routes.go:183:74: `(*upstream).wsRoute` - `matchers` always receives `nil` (unparam)
+internal/upstream/routes.go:243: Function 'configureRoutes' is too long (342 > 60) (funlen)
+internal/upstream/routes.go:501: internal/upstream/routes.go:501: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "TODO: We should probably not return a HT..." (godox)
internal/upstream/upstream.go:116: internal/upstream/upstream.go:116: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "TODO: move to LabKit https://gitlab.com/..." (godox)
internal/zipartifacts/metadata.go:118:54: G115: integer overflow conversion int -> uint32 (gosec)
-internal/zipartifacts/open_archive.go:78:28: response body must be closed (bodyclose)
+internal/zipartifacts/open_archive.go:74:28: response body must be closed (bodyclose)
diff --git a/workhorse/internal/artifacts/entry.go b/workhorse/internal/artifacts/entry.go
index 11b61f90940..709aa0be432 100644
--- a/workhorse/internal/artifacts/entry.go
+++ b/workhorse/internal/artifacts/entry.go
@@ -11,12 +11,17 @@ import (
"os"
"os/exec"
"path/filepath"
+ "strconv"
"strings"
"syscall"
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/labkit/mask"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
+
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper/command"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper/fail"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/senddata"
@@ -37,6 +42,13 @@ func (e *entry) Inject(w http.ResponseWriter, r *http.Request, sendData string)
return
}
+ if helper.IsURL(params.Archive) {
+ // Get the tracker from context and set flags
+ if tracker, ok := metrics.FromContext(r.Context()); ok {
+ tracker.SetFlag(metrics.KeyFetchedExternalURL, strconv.FormatBool(true))
+ }
+ }
+
log.WithContextFields(r.Context(), log.Fields{
"entry": params.Entry,
"archive": mask.URL(params.Archive),
diff --git a/workhorse/internal/artifacts/entry_test.go b/workhorse/internal/artifacts/entry_test.go
index ec9421b9150..469f2e0113a 100644
--- a/workhorse/internal/artifacts/entry_test.go
+++ b/workhorse/internal/artifacts/entry_test.go
@@ -13,6 +13,8 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
)
@@ -24,8 +26,13 @@ func testEntryServer(t *testing.T, archive string, entry string) *httptest.Respo
encodedEntry := base64.StdEncoding.EncodeToString([]byte(entry))
jsonParams := fmt.Sprintf(`{"Archive":"%s","Entry":"%s"}`, archive, encodedEntry)
data := base64.URLEncoding.EncodeToString([]byte(jsonParams))
-
+ if helper.IsURL(archive) {
+ r = testhelper.RequestWithMetrics(t, r)
+ }
SendEntry.Inject(w, r, data)
+ if helper.IsURL(archive) {
+ testhelper.AssertMetrics(t, r)
+ }
})
httpRequest, err := http.NewRequest("GET", "/url/path", nil)
diff --git a/workhorse/internal/dependencyproxy/dependencyproxy.go b/workhorse/internal/dependencyproxy/dependencyproxy.go
index b865c5d48a1..01fdc8ca81b 100644
--- a/workhorse/internal/dependencyproxy/dependencyproxy.go
+++ b/workhorse/internal/dependencyproxy/dependencyproxy.go
@@ -9,12 +9,15 @@ import (
"net/http"
"net/url"
"os"
+ "strconv"
"strings"
"sync"
"time"
"gitlab.com/gitlab-org/labkit/log"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
+
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/forwardheaders"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper/fail"
@@ -122,6 +125,11 @@ func (p *Injector) SetUploadHandler(uploadHandler upload.BodyUploadHandler) {
// Inject performs the injection of dependencies
func (p *Injector) Inject(w http.ResponseWriter, r *http.Request, sendData string) {
+ // Get the tracker from context and set flags
+ if tracker, ok := metrics.FromContext(r.Context()); ok {
+ tracker.SetFlag(metrics.KeyFetchedExternalURL, strconv.FormatBool(true))
+ }
+
params, err := p.unpackParams(sendData)
if err != nil {
fail.Request(w, r, err)
diff --git a/workhorse/internal/dependencyproxy/dependencyproxy_test.go b/workhorse/internal/dependencyproxy/dependencyproxy_test.go
index 35843038969..44eaf159b28 100644
--- a/workhorse/internal/dependencyproxy/dependencyproxy_test.go
+++ b/workhorse/internal/dependencyproxy/dependencyproxy_test.go
@@ -134,9 +134,13 @@ func TestInject(t *testing.T) {
r := httptest.NewRequest("GET", "/target", nil)
sendData := base64.StdEncoding.EncodeToString([]byte(tokenJSON + originResourceServer.URL + urlJSON))
+ // add metrics tracker
+ r = testhelper.RequestWithMetrics(t, r)
+
injector.Inject(tc.responseWriter, r, sendData)
require.Equal(t, tc.handlerMustBeCalled, handlerIsCalled, "a partial file must not be saved")
+ testhelper.AssertMetrics(t, r)
}
}
@@ -170,7 +174,7 @@ func TestSuccessfullRequest(t *testing.T) {
injector := NewInjector()
injector.SetUploadHandler(uploadHandler)
- response := makeRequest(injector, tokenJSON+originResourceServer.URL+urlJSON)
+ response := makeRequest(t, injector, tokenJSON+originResourceServer.URL+urlJSON)
require.Equal(t, "/target/upload", uploadHandler.request.URL.Path)
require.Equal(t, int64(6), uploadHandler.request.ContentLength)
@@ -291,7 +295,7 @@ func TestValidUploadConfiguration(t *testing.T) {
sendDataJSONString, err := json.Marshal(sendData)
require.NoError(t, err)
- response := makeRequest(injector, string(sendDataJSONString))
+ response := makeRequest(t, injector, string(sendDataJSONString))
// check the response
require.Equal(t, 200, response.Code)
@@ -345,7 +349,7 @@ func TestInvalidUploadConfiguration(t *testing.T) {
sendDataJSONString, err := json.Marshal(tc.sendData)
require.NoError(t, err)
- response := makeRequest(NewInjector(), string(sendDataJSONString))
+ response := makeRequest(t, NewInjector(), string(sendDataJSONString))
require.Equal(t, 500, response.Code)
require.Equal(t, "Internal Server Error\n", response.Body.String())
@@ -378,7 +382,7 @@ func TestTimeoutConfiguration(t *testing.T) {
sendDataJSONString, err := json.Marshal(sendData)
require.NoError(t, err)
- response := makeRequest(injector, string(sendDataJSONString))
+ response := makeRequest(t, injector, string(sendDataJSONString))
responseResult := response.Result()
defer responseResult.Body.Close()
require.Equal(t, http.StatusGatewayTimeout, responseResult.StatusCode)
@@ -396,7 +400,7 @@ func TestSSRFFilter(t *testing.T) {
sendDataJSONString, err := json.Marshal(sendData)
require.NoError(t, err)
- response := makeRequest(NewInjector(), string(sendDataJSONString))
+ response := makeRequest(t, NewInjector(), string(sendDataJSONString))
// Test uses loopback IP like 127.0.0.x and thus fails
require.Equal(t, http.StatusForbidden, response.Code)
@@ -425,7 +429,7 @@ func TestSSRFFilterWithAllowLocalhost(t *testing.T) {
injector := NewInjector()
injector.SetUploadHandler(uploadHandler)
- response := makeRequest(injector, string(sendDataJSONString))
+ response := makeRequest(t, injector, string(sendDataJSONString))
require.Equal(t, http.StatusOK, response.Code)
}
@@ -471,7 +475,7 @@ func TestRestrictForwardedResponseHeaders(t *testing.T) {
},
})
- response := makeRequest(injector, entryParamsJSON)
+ response := makeRequest(t, injector, entryParamsJSON)
require.Equal(t, "/target/upload", uploadHandler.request.URL.Path)
require.Equal(t, int64(6), uploadHandler.request.ContentLength)
@@ -496,14 +500,14 @@ func jsonEntryParams(t *testing.T, params *map[string]interface{}) string {
}
func TestIncorrectSendData(t *testing.T) {
- response := makeRequest(NewInjector(), "")
+ response := makeRequest(t, NewInjector(), "")
require.Equal(t, 500, response.Code)
require.Equal(t, "Internal Server Error\n", response.Body.String())
}
func TestIncorrectSendDataUrl(t *testing.T) {
- response := makeRequest(NewInjector(), `{"Token": "token", "Url": "url"}`)
+ response := makeRequest(t, NewInjector(), `{"Token": "token", "Url": "url"}`)
require.Equal(t, http.StatusBadGateway, response.Code)
require.Equal(t, "Bad Gateway\n", response.Body.String())
@@ -524,7 +528,7 @@ func TestFailedOriginServer(t *testing.T) {
injector := NewInjector()
injector.SetUploadHandler(uploadHandler)
- response := makeRequest(injector, tokenJSON+originResourceServer.URL+urlJSON)
+ response := makeRequest(t, injector, tokenJSON+originResourceServer.URL+urlJSON)
require.Equal(t, 404, response.Code)
require.Equal(t, "Not found", response.Body.String())
@@ -575,7 +579,7 @@ func TestLongUploadRequest(t *testing.T) {
r := httptest.NewRequest("GET", uploadServer.URL+"/upload", nil).WithContext(ctx)
r.Header.Set("Overridden-Header", "request")
- response := makeCustomRequest(injector, `{"Token": "token", "Url": "`+originResourceServer.URL+`/upstream"}`, r)
+ response := makeCustomRequest(t, injector, `{"Token": "token", "Url": "`+originResourceServer.URL+`/upstream"}`, r)
// wait for the slow upload to finish
require.Equal(t, http.StatusOK, response.Code)
@@ -601,7 +605,7 @@ func TestHttpClientReuse(t *testing.T) {
injector := NewInjector()
injector.SetUploadHandler(uploadHandler)
- response := makeRequest(injector, tokenJSON+originResourceServer.URL+urlJSON)
+ response := makeRequest(t, injector, tokenJSON+originResourceServer.URL+urlJSON)
require.Equal(t, http.StatusOK, response.Code)
_, found := httpClients.Load(expectedKey)
require.True(t, found)
@@ -612,17 +616,21 @@ func TestHttpClientReuse(t *testing.T) {
require.NotEqual(t, cachedClient(&entryParams{SSRFFilter: true}), storedClient)
}
-func makeRequest(injector *Injector, data string) *httptest.ResponseRecorder {
+func makeRequest(t *testing.T, injector *Injector, data string) *httptest.ResponseRecorder {
r := httptest.NewRequest("GET", "/target", nil)
r.Header.Set("Overridden-Header", "request")
-
- return makeCustomRequest(injector, data, r)
+ return makeCustomRequest(t, injector, data, r)
}
-func makeCustomRequest(injector *Injector, data string, r *http.Request) *httptest.ResponseRecorder {
+func makeCustomRequest(t *testing.T, injector *Injector, data string, r *http.Request) *httptest.ResponseRecorder {
+ // add metrics tracker
+ r = testhelper.RequestWithMetrics(t, r)
+
w := httptest.NewRecorder()
sendData := base64.StdEncoding.EncodeToString([]byte(data))
injector.Inject(w, r, sendData)
+ testhelper.AssertMetrics(t, r)
+
return w
}
diff --git a/workhorse/internal/helper/helpers.go b/workhorse/internal/helper/helpers.go
index 29a6fe04b2e..45131c0e3a9 100644
--- a/workhorse/internal/helper/helpers.go
+++ b/workhorse/internal/helper/helpers.go
@@ -7,6 +7,7 @@ import (
"net/url"
"os"
"path/filepath"
+ "strings"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/log"
)
@@ -56,3 +57,8 @@ func IsContentType(expected, actual string) bool {
parsed, _, err := mime.ParseMediaType(actual)
return err == nil && parsed == expected
}
+
+// IsURL checks if the given string starts with http:// or https://
+func IsURL(path string) bool {
+ return strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://")
+}
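
Aside: the duplicated isURL helpers in imageresizer and zipartifacts (removed further down) now route through this single function. A minimal, self-contained sketch of the dispatch pattern the callers follow; openLocation and its return values are hypothetical stand-ins for the real openFromURL/openHTTPArchive and file-based paths:

package main

import (
	"fmt"
	"strings"
)

// isURL mirrors helper.IsURL: a plain prefix check, so only lowercase
// "http://" and "https://" locations are treated as remote.
func isURL(path string) bool {
	return strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://")
}

// openLocation shows the caller-side dispatch used by openSourceImage and
// openArchiveLocation after this change (branch bodies are placeholders).
func openLocation(location string) string {
	if isURL(location) {
		return "remote" // real code: openFromURL / openHTTPArchive
	}
	return "local" // real code: os.Open-based file path
}

func main() {
	fmt.Println(openLocation("https://example.com/a.zip")) // remote
	fmt.Println(openLocation("/var/opt/archive.zip"))      // local
}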
diff --git a/workhorse/internal/imageresizer/image_resizer.go b/workhorse/internal/imageresizer/image_resizer.go
index 40b4d1e9d49..22686d523f6 100644
--- a/workhorse/internal/imageresizer/image_resizer.go
+++ b/workhorse/internal/imageresizer/image_resizer.go
@@ -17,6 +17,9 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
+
"gitlab.com/gitlab-org/labkit/tracing"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
@@ -163,6 +166,13 @@ func (r *Resizer) Inject(w http.ResponseWriter, req *http.Request, paramsData st
return
}
+ if helper.IsURL(params.Location) {
+ // Get the tracker from context and set flags
+ if tracker, ok := metrics.FromContext(req.Context()); ok {
+ tracker.SetFlag(metrics.KeyFetchedExternalURL, strconv.FormatBool(true))
+ }
+ }
+
imageFile, err := openSourceImage(params.Location)
if err != nil {
// This means we cannot even read the input image; fail fast.
@@ -319,12 +329,8 @@ func startResizeImageCommand(ctx context.Context, imageReader io.Reader, params
return cmd, stdout, nil
}
-func isURL(location string) bool {
- return strings.HasPrefix(location, "http://") || strings.HasPrefix(location, "https://")
-}
-
func openSourceImage(location string) (*imageFile, error) {
- if isURL(location) {
+ if helper.IsURL(location) {
return openFromURL(location)
}
diff --git a/workhorse/internal/imageresizer/image_resizer_test.go b/workhorse/internal/imageresizer/image_resizer_test.go
index 7edcac34e06..5c875eedc00 100644
--- a/workhorse/internal/imageresizer/image_resizer_test.go
+++ b/workhorse/internal/imageresizer/image_resizer_test.go
@@ -1,6 +1,7 @@
package imageresizer
import (
+ "bytes"
"encoding/base64"
"encoding/json"
"image"
@@ -9,6 +10,7 @@ import (
"net/http"
"net/http/httptest"
"os"
+ "strconv"
"testing"
"time"
@@ -230,6 +232,77 @@ func TestServeOriginalImageWhenSourceImageIsTooSmall(t *testing.T) {
require.Equal(t, content, responseData, "expected original image")
}
+func TestResizeImageFromHTTPServer(t *testing.T) {
+ // Create a test server that serves a test image
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
+ // Open a test image file
+ file, err := os.Open("../../testdata/image.png")
+ if err != nil {
+ w.WriteHeader(http.StatusInternalServerError)
+ return
+ }
+ defer file.Close()
+
+ // Get file info for content length and modification time
+ fileInfo, err := file.Stat()
+ if err != nil {
+ w.WriteHeader(http.StatusInternalServerError)
+ return
+ }
+
+ // Set appropriate headers
+ w.Header().Set("Content-Type", "image/png")
+ w.Header().Set("Content-Length", strconv.FormatInt(fileInfo.Size(), 10))
+ w.Header().Set("Last-Modified", fileInfo.ModTime().UTC().Format(http.TimeFormat))
+
+ // Copy the file to the response
+ _, _ = io.Copy(w, file)
+ }))
+ defer ts.Close()
+
+ // Create resize parameters with the test server URL
+ params := resizeParams{
+ Location: ts.URL,
+ ContentType: "image/png",
+ Width: 100,
+ }
+
+ // Create a resizer with test configuration
+ resizer := NewResizer(config.Config{
+ ImageResizerConfig: config.ImageResizerConfig{
+ MaxScalerProcs: 1,
+ MaxFilesize: 10 * 1024 * 1024, // 10MB
+ },
+ })
+
+ paramsJSON := encodeParams(t, &params)
+ // Create a test request with metrics
+ req := httptest.NewRequest("GET", "/image", nil)
+ req = testhelper.RequestWithMetrics(t, req)
+ w := httptest.NewRecorder()
+
+ // Call the Inject method
+ resizer.Inject(w, req, paramsJSON)
+
+ // Check the response
+ resp := w.Result()
+ defer resp.Body.Close()
+
+ require.Equal(t, http.StatusOK, resp.StatusCode, "Expected status OK")
+
+ // Verify the response contains image data
+ body, err := io.ReadAll(resp.Body)
+ require.NoError(t, err, "Failed to read response body")
+ require.NotEmpty(t, body, "Response body is empty")
+
+ // Check if the response is a valid image
+ // For PNG, we can check for the PNG signature
+ require.True(t, bytes.HasPrefix(body, []byte(pngMagic)), "Response is not a valid PNG image")
+
+ // Assert metrics were recorded properly
+ testhelper.AssertMetrics(t, req)
+}
+
// The Rails application sends a Base64 encoded JSON string carrying
// these parameters in an HTTP response header
func encodeParams(t *testing.T, p *resizeParams) string {
diff --git a/workhorse/internal/metrics/requesttracker.go b/workhorse/internal/metrics/requesttracker.go
new file mode 100644
index 00000000000..e4ca980f191
--- /dev/null
+++ b/workhorse/internal/metrics/requesttracker.go
@@ -0,0 +1,61 @@
+// Package metrics is used to track various metrics and flags for incoming requests in GitLab Workhorse.
+// This package provides utilities to manage request metadata, such as setting and retrieving arbitrary flags.
+package metrics
+
+import (
+ "context"
+)
+
+const (
+ // KeyFetchedExternalURL is a flag key used to track whether the request fetched an external URL.
+ KeyFetchedExternalURL = "fetched_external_url"
+)
+
+// RequestTracker is a simple container for request metadata and flags
+type RequestTracker struct {
+ // Flags stores arbitrary string values for the request
+ Flags map[string]string
+}
+
+// NewRequestTracker creates a new RequestTracker
+func NewRequestTracker() *RequestTracker {
+ return &RequestTracker{
+ Flags: make(map[string]string),
+ }
+}
+
+// SetFlag sets a flag value
+func (rt *RequestTracker) SetFlag(key, value string) {
+ rt.Flags[key] = value
+}
+
+// GetFlag gets a flag value
+func (rt *RequestTracker) GetFlag(key string) (string, bool) {
+ val, ok := rt.Flags[key]
+ return val, ok
+}
+
+// HasFlag returns true if the flag exists and equals the given value
+func (rt *RequestTracker) HasFlag(key, value string) bool {
+ if val, ok := rt.Flags[key]; ok {
+ return val == value
+ }
+ return false
+}
+
+// Context key for storing the request tracker
+type contextKey string
+
+// TrackerKey is the key used to store the RequestTracker in context
+const TrackerKey contextKey = "requestTracker"
+
+// FromContext retrieves the RequestTracker from context
+func FromContext(ctx context.Context) (*RequestTracker, bool) {
+ rt, ok := ctx.Value(TrackerKey).(*RequestTracker)
+ return rt, ok
+}
+
+// NewContext creates a new context with the RequestTracker
+func NewContext(ctx context.Context, rt *RequestTracker) context.Context {
+ return context.WithValue(ctx, TrackerKey, rt)
+}
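
Aside: a stdlib-only usage sketch of the pattern this package enables (names here are local stand-ins, not the real API; the production wiring lives in observabilityMiddlewares below). The key design choice is storing a pointer in the context: flags set deep inside a handler remain visible to the outer instrumentation after the handler returns.

package main

import (
	"context"
	"fmt"
	"net/http"
	"net/http/httptest"
)

type contextKey string

const trackerKey contextKey = "requestTracker"

// tracker mirrors metrics.RequestTracker in miniature.
type tracker struct{ flags map[string]string }

// withTracker attaches a fresh tracker before the handler runs and can
// inspect it afterwards, because the context holds a pointer to it.
func withTracker(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		rt := &tracker{flags: map[string]string{}}
		next.ServeHTTP(w, r.WithContext(context.WithValue(r.Context(), trackerKey, rt)))
		fmt.Println("flags after handler:", rt.flags) // fetched_external_url:true
	})
}

func main() {
	h := withTracker(http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) {
		if rt, ok := r.Context().Value(trackerKey).(*tracker); ok {
			rt.flags["fetched_external_url"] = "true"
		}
	}))
	h.ServeHTTP(httptest.NewRecorder(), httptest.NewRequest("GET", "/", nil))
}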
diff --git a/workhorse/internal/metrics/requesttracker_test.go b/workhorse/internal/metrics/requesttracker_test.go
new file mode 100644
index 00000000000..82afc3ebb11
--- /dev/null
+++ b/workhorse/internal/metrics/requesttracker_test.go
@@ -0,0 +1,87 @@
+package metrics
+
+import (
+ "context"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestSetAndGetFlag(t *testing.T) {
+ rt := NewRequestTracker()
+
+ // Test setting and getting a flag
+ rt.SetFlag("test_key", "test_value")
+ val, ok := rt.GetFlag("test_key")
+
+ require.True(t, ok)
+ require.Equal(t, "test_value", val)
+
+ // Test getting a non-existent flag
+ val, ok = rt.GetFlag("non_existent")
+
+ require.False(t, ok)
+ require.Empty(t, val)
+
+ // Test overwriting a flag
+ rt.SetFlag("test_key", "new_value")
+ val, ok = rt.GetFlag("test_key")
+
+ require.True(t, ok)
+ require.Equal(t, "new_value", val)
+}
+
+func TestHasFlag(t *testing.T) {
+ rt := NewRequestTracker()
+
+ // Set a flag
+ rt.SetFlag("test_key", "test_value")
+
+ // Test HasFlag with correct value
+ require.True(t, rt.HasFlag("test_key", "test_value"))
+
+ // Test HasFlag with incorrect value
+ require.False(t, rt.HasFlag("test_key", "wrong_value"))
+
+ // Test HasFlag with non-existent key
+ require.False(t, rt.HasFlag("non_existent", "any_value"))
+}
+
+func TestContextOperations(t *testing.T) {
+ // Create a base context
+ ctx := context.Background()
+
+ // Create a request tracker
+ rt := NewRequestTracker()
+ rt.SetFlag("test_key", "test_value")
+
+ // Test NewContext
+ ctxWithTracker := NewContext(ctx, rt)
+ require.NotEqual(t, ctx, ctxWithTracker)
+
+ // Test FromContext - successful retrieval
+ retrievedRT, ok := FromContext(ctxWithTracker)
+ require.True(t, ok)
+
+ // Verify the retrieved tracker has the expected flag
+ val, ok := retrievedRT.GetFlag("test_key")
+ require.True(t, ok)
+ require.Equal(t, "test_value", val)
+
+ // Test FromContext - no tracker in context
+ emptyRT, ok := FromContext(ctx)
+ require.False(t, ok)
+ require.Nil(t, emptyRT)
+}
+
+func TestPredefinedConstants(t *testing.T) {
+ // Test using the predefined constant
+ rt := NewRequestTracker()
+ rt.SetFlag(KeyFetchedExternalURL, "true")
+
+ val, ok := rt.GetFlag(KeyFetchedExternalURL)
+ require.True(t, ok)
+ require.Equal(t, "true", val)
+
+ require.True(t, rt.HasFlag(KeyFetchedExternalURL, "true"))
+}
diff --git a/workhorse/internal/sendurl/sendurl.go b/workhorse/internal/sendurl/sendurl.go
index 6e4f2ce08db..f372b8544d7 100644
--- a/workhorse/internal/sendurl/sendurl.go
+++ b/workhorse/internal/sendurl/sendurl.go
@@ -7,6 +7,7 @@ import (
"io"
"net/http"
"os"
+ "strconv"
"strings"
"sync"
"time"
@@ -14,6 +15,8 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
+
"gitlab.com/gitlab-org/labkit/mask"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
@@ -117,6 +120,11 @@ func (e *entry) Inject(w http.ResponseWriter, r *http.Request, sendData string)
return
}
+ // Get the tracker from context and set flags
+ if tracker, ok := metrics.FromContext(r.Context()); ok {
+ tracker.SetFlag(metrics.KeyFetchedExternalURL, strconv.FormatBool(true))
+ }
+
setDefaultMethod(&params)
log.WithContextFields(r.Context(), log.Fields{
diff --git a/workhorse/internal/sendurl/sendurl_test.go b/workhorse/internal/sendurl/sendurl_test.go
index cb70b2ea0c5..8e37a989b7a 100644
--- a/workhorse/internal/sendurl/sendurl_test.go
+++ b/workhorse/internal/sendurl/sendurl_test.go
@@ -52,7 +52,12 @@ func testEntryServer(t *testing.T, requestURL string, httpHeaders http.Header, a
w.Header().Set("Date", "Wed, 21 Oct 2015 05:28:00 GMT")
w.Header().Set("Pragma", "no-cache")
+ // add metrics tracker
+ r = testhelper.RequestWithMetrics(t, r)
+
SendURL.Inject(w, r, data)
+
+ testhelper.AssertMetrics(t, r)
}
serveFile := func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "GET", r.Method)
@@ -239,7 +244,11 @@ func TestPostRequest(t *testing.T) {
data := base64.URLEncoding.EncodeToString(jsonParams)
+ // add metrics tracker
+ r = testhelper.RequestWithMetrics(t, r)
+
SendURL.Inject(w, r, data)
+ testhelper.AssertMetrics(t, r)
}
externalPostURLHandler := func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "POST", r.Method)
@@ -300,9 +309,13 @@ func TestErrorWithCustomStatusCode(t *testing.T) {
response := httptest.NewRecorder()
request := httptest.NewRequest("GET", "/target", nil)
+ // add metrics tracker
+ request = testhelper.RequestWithMetrics(t, request)
+
SendURL.Inject(response, request, data)
require.Equal(t, http.StatusTeapot, response.Code)
+ testhelper.AssertMetrics(t, request)
}
func TestHttpClientReuse(t *testing.T) {
diff --git a/workhorse/internal/testhelper/testhelper.go b/workhorse/internal/testhelper/testhelper.go
index 9c0d312d186..7a49f558146 100644
--- a/workhorse/internal/testhelper/testhelper.go
+++ b/workhorse/internal/testhelper/testhelper.go
@@ -19,6 +19,8 @@ import (
"github.com/golang-jwt/jwt/v5"
"github.com/stretchr/testify/require"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
+
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
@@ -272,3 +274,26 @@ func WriteExecutable(tb testing.TB, path string, content []byte) string {
return path
}
+
+// RequestWithMetrics wraps the given request with metrics tracking context.
+func RequestWithMetrics(t *testing.T, r *http.Request) *http.Request {
+ t.Helper()
+ // add metrics tracker
+ tracker := metrics.NewRequestTracker()
+ ctx := metrics.NewContext(r.Context(), tracker)
+
+ return r.WithContext(ctx)
+}
+
+// AssertMetrics checks if the request has the expected metrics tracking and flags.
+func AssertMetrics(t *testing.T, r *http.Request) {
+ t.Helper()
+
+ // check metrics
+ tracker, ok := metrics.FromContext(r.Context())
+ require.True(t, ok)
+
+ val, ok := tracker.GetFlag(metrics.KeyFetchedExternalURL)
+ require.True(t, ok)
+ require.Equal(t, "true", val)
+}
diff --git a/workhorse/internal/upstream/metrics.go b/workhorse/internal/upstream/metrics.go
index c2be4541c4e..03aac0f485e 100644
--- a/workhorse/internal/upstream/metrics.go
+++ b/workhorse/internal/upstream/metrics.go
@@ -1,6 +1,7 @@
package upstream
import (
+ "context"
"net/http"
"github.com/prometheus/client_golang/prometheus"
@@ -8,6 +9,8 @@ import (
"github.com/prometheus/client_golang/prometheus/promhttp"
"gitlab.com/gitlab-org/labkit/metrics"
+
+ wm "gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
)
const (
@@ -28,7 +31,7 @@ var (
buildHandler = metrics.NewHandlerFactory(
metrics.WithNamespace(namespace),
- metrics.WithLabels("route", "route_id", "backend_id"))
+ metrics.WithLabels("route", "route_id", "backend_id", wm.KeyFetchedExternalURL))
)
func instrumentRoute(next http.Handler, _ string, metadata routeMetadata) http.Handler {
@@ -36,7 +39,20 @@ func instrumentRoute(next http.Handler, _ string, metadata routeMetadata) http.H
map[string]string{
"route": metadata.regexpStr,
"route_id": metadata.routeID,
- "backend_id": string(metadata.backendID)}))
+ "backend_id": string(metadata.backendID)}),
+ metrics.WithLabelFromContext(
+ wm.KeyFetchedExternalURL,
+ func(ctx context.Context) string {
+ if tracker, ok := wm.FromContext(ctx); ok {
+ val, ok := tracker.GetFlag(wm.KeyFetchedExternalURL)
+ if ok {
+ return val
+ }
+ }
+ return "false"
+ },
+ ),
+ )
}
func instrumentGeoProxyRoute(next http.Handler, _ string, metadata routeMetadata) http.Handler {
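
Aside: the label callback above is evaluated per request when the observation is recorded, and everything collapses to "true"/"false", which keeps the cardinality of the new Prometheus label bounded. A self-contained sketch of just that fallback logic (local stand-in types, not the real package):

package main

import (
	"context"
	"fmt"
)

type contextKey string

const trackerKey contextKey = "requestTracker"

type tracker struct{ flags map[string]string }

// labelValue reproduces the closure's semantics in isolation: a missing
// tracker or an unset flag both collapse to "false".
func labelValue(ctx context.Context) string {
	if rt, ok := ctx.Value(trackerKey).(*tracker); ok {
		if v, ok := rt.flags["fetched_external_url"]; ok {
			return v
		}
	}
	return "false"
}

func main() {
	fmt.Println(labelValue(context.Background())) // "false": no tracker attached
	rt := &tracker{flags: map[string]string{"fetched_external_url": "true"}}
	fmt.Println(labelValue(context.WithValue(context.Background(), trackerKey, rt))) // "true"
}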
diff --git a/workhorse/internal/upstream/routes.go b/workhorse/internal/upstream/routes.go
index b83ebffaecc..5c57c9e8ce6 100644
--- a/workhorse/internal/upstream/routes.go
+++ b/workhorse/internal/upstream/routes.go
@@ -21,6 +21,7 @@ import (
gobpkg "gitlab.com/gitlab-org/gitlab/workhorse/internal/gob"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/imageresizer"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
proxypkg "gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/queueing"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
@@ -138,6 +139,16 @@ func (u *upstream) observabilityMiddlewares(handler http.Handler, method string,
handler = instrumentGeoProxyRoute(handler, method, metadata) // Add Geo prometheus metrics
}
+ originalHandler := handler
+
+ // Wrap with metrics tracking (add the tracker to the context)
+ handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ tracker := metrics.NewRequestTracker()
+ ctx := metrics.NewContext(r.Context(), tracker)
+ r = r.WithContext(ctx)
+ originalHandler.ServeHTTP(w, r)
+ })
+
return handler
}
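
Aside: the originalHandler := handler capture above is load-bearing. Go closures capture variables, not values, so a closure that read handler directly would see the reassigned (wrapped) handler and recurse into itself. A minimal sketch:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	var handler http.Handler = http.HandlerFunc(func(http.ResponseWriter, *http.Request) {
		fmt.Println("inner handler")
	})

	// Correct: snapshot the current value before reassigning.
	original := handler
	handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Println("wrapper")
		original.ServeHTTP(w, r) // calling handler.ServeHTTP here would recurse forever
	})

	handler.ServeHTTP(httptest.NewRecorder(), httptest.NewRequest("GET", "/", nil))
}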
diff --git a/workhorse/internal/zipartifacts/open_archive.go b/workhorse/internal/zipartifacts/open_archive.go
index ffaaab07bf4..596910e5669 100644
--- a/workhorse/internal/zipartifacts/open_archive.go
+++ b/workhorse/internal/zipartifacts/open_archive.go
@@ -8,8 +8,8 @@ import (
"net/http"
"os"
"path/filepath"
- "strings"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/httprs"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/transport"
@@ -56,17 +56,13 @@ func OpenArchiveWithReaderFunc(ctx context.Context, location string, readerFunc
}
func openArchiveLocation(ctx context.Context, location string) (*archive, error) {
- if isURL(location) {
+ if helper.IsURL(location) {
return openHTTPArchive(ctx, location)
}
return openFileArchive(ctx, location)
}
-func isURL(path string) bool {
- return strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://")
-}
-
func openHTTPArchive(ctx context.Context, archivePath string) (*archive, error) {
scrubbedArchivePath := mask.URL(archivePath)
req, err := http.NewRequest(http.MethodGet, archivePath, nil)