Add latest changes from gitlab-org/gitlab@master

Author: GitLab Bot
Date:   2025-05-09 00:12:34 +00:00
Parent: 00ea8056c8
Commit: 415658cdd2
52 changed files with 490 additions and 518 deletions

View File

@@ -370,7 +370,7 @@
{"name":"letter_opener_web","version":"3.0.0","platform":"ruby","checksum":"3f391efe0e8b9b24becfab5537dfb17a5cf5eb532038f947daab58cb4b749860"},
{"name":"libyajl2","version":"2.1.0","platform":"ruby","checksum":"aa5df6c725776fc050c8418450de0f7c129cb7200b811907c4c0b3b5c0aea0ef"},
{"name":"license_finder","version":"7.2.1","platform":"ruby","checksum":"179ead19b64b170638b72fd16024233813673ac9d20d5ba75ae0b4444887ef14"},
{"name":"licensee","version":"9.17.1","platform":"ruby","checksum":"0be022c66d8853d35b08171a0f2575d5ccb5aef8a7020a1815938b4f26f4089a"},
{"name":"licensee","version":"9.18.0","platform":"ruby","checksum":"3e83db984fb7e4e51c98fea0e434138dcb6112f8c26dc7693734a4f8df99df77"},
{"name":"listen","version":"3.7.1","platform":"ruby","checksum":"3b80caa7aa77fae836916c2f9e3fbcafbd15f5d695dd487c1f5b5e7e465efe29"},
{"name":"llhttp-ffi","version":"0.4.0","platform":"ruby","checksum":"e5f7327db3cf8007e648342ef76347d6e0ae545a8402e519cca9c886eb37b001"},
{"name":"locale","version":"2.1.4","platform":"ruby","checksum":"522f9973ef3eee64aac9bca06d21db2fba675fa3d2cf61d21f42d1ca18a9f780"},
@@ -614,7 +614,7 @@
{"name":"rest-client","version":"2.1.0","platform":"x86-mingw32","checksum":"fb543caf36cb555c05c6186aeb1273c6a1b059896e4cfd394e7269b20c40ca01"},
{"name":"rest-client","version":"2.1.0","platform":"x86-mswin32","checksum":"a35a3bb8d16ca39d110a946a2c805267f98ce07a0ae890e4512a45eadea47a6e"},
{"name":"retriable","version":"3.1.2","platform":"ruby","checksum":"0a5a5d0ca4ba61a76fb31a17ab8f7f80281beb040c329d34dfc137a1398688e0"},
{"name":"reverse_markdown","version":"2.1.1","platform":"ruby","checksum":"b2206466b682ac1177b6b8ec321d00a84fca02d096c5d676a7a0cc5838dc0701"},
{"name":"reverse_markdown","version":"3.0.0","platform":"ruby","checksum":"ab228386765a0259835873cd07054b62939c40f620c77c247eafaaa3b23faca4"},
{"name":"rexml","version":"3.4.1","platform":"ruby","checksum":"c74527a9a0a04b4ec31dbe0dc4ed6004b960af943d8db42e539edde3a871abca"},
{"name":"rinku","version":"2.0.0","platform":"ruby","checksum":"3e695aaf9f24baba3af45823b5c427b58a624582132f18482320e2737f9f8a85"},
{"name":"rotp","version":"6.3.0","platform":"ruby","checksum":"75d40087e65ed0d8022c33055a6306c1c400d1c12261932533b5d6cbcd868854"},
@@ -712,7 +712,7 @@
{"name":"slack-messenger","version":"2.3.6","platform":"ruby","checksum":"58581e587debcbb769336cc7ebe4eb6ae411947fccf347e967a17ac9813e66d8"},
{"name":"snaky_hash","version":"2.0.0","platform":"ruby","checksum":"fe8b2e39e8ff69320f7812af73ea06401579e29ff1734a7009567391600687de"},
{"name":"snowplow-tracker","version":"0.8.0","platform":"ruby","checksum":"7ba6f4f1443a829845fd28e63eda72d9d3d247f485310ddcccaebbc52b734a38"},
{"name":"solargraph","version":"0.54.0","platform":"ruby","checksum":"3fd13f381e6428b3c593716ee6d3d5aa802ac647b8b13692d760ab7ac3e1fdc1"},
{"name":"solargraph","version":"0.54.2","platform":"ruby","checksum":"fe22f56ec2efe64f674b0e9dd3ac8a99df5b5833c2ca84993bdb2af2bb0b6c56"},
{"name":"solargraph-rspec","version":"0.5.1","platform":"ruby","checksum":"0dfc9124f17b23e95c30acb82c1f799c865408a56b17099b2d6d7b23a76bface"},
{"name":"sorbet-runtime","version":"0.5.11647","platform":"ruby","checksum":"64b65112f2e6a5323310ca9ac0d7d9a6be63aade5a62a6225fe066042ff4fdb6"},
{"name":"spamcheck","version":"1.3.3","platform":"ruby","checksum":"3a29ba9dfcd59543d88054d38c657f79e0a6cf44d763df08ad47680abed50ec7"},

View File

@@ -1134,10 +1134,10 @@ GEM
tomlrb (>= 1.3, < 2.1)
with_env (= 1.1.0)
xml-simple (~> 1.1.9)
licensee (9.17.1)
licensee (9.18.0)
dotenv (>= 2, < 4)
octokit (>= 4.20, < 10.0)
reverse_markdown (>= 1, < 3)
reverse_markdown (>= 1, < 4)
rugged (>= 0.24, < 2.0)
thor (>= 0.19, < 2.0)
listen (3.7.1)
@@ -1652,7 +1652,7 @@ GEM
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
retriable (3.1.2)
reverse_markdown (2.1.1)
reverse_markdown (3.0.0)
nokogiri
rexml (3.4.1)
rinku (2.0.0)
@@ -1832,9 +1832,9 @@ GEM
hashie
version_gem (~> 1.1)
snowplow-tracker (0.8.0)
solargraph (0.54.0)
solargraph (0.54.2)
backport (~> 1.2)
benchmark
benchmark (~> 0.4)
bundler (~> 2.0)
diff-lcs (~> 1.4)
jaro_winkler (~> 1.6)
@@ -1845,7 +1845,7 @@ GEM
ostruct (~> 0.6)
parser (~> 3.0)
rbs (~> 3.3)
reverse_markdown (>= 2.0, < 4)
reverse_markdown (~> 3.0)
rubocop (~> 1.38)
thor (~> 1.0)
tilt (~> 2.0)

View File

@@ -370,7 +370,7 @@
{"name":"letter_opener_web","version":"3.0.0","platform":"ruby","checksum":"3f391efe0e8b9b24becfab5537dfb17a5cf5eb532038f947daab58cb4b749860"},
{"name":"libyajl2","version":"2.1.0","platform":"ruby","checksum":"aa5df6c725776fc050c8418450de0f7c129cb7200b811907c4c0b3b5c0aea0ef"},
{"name":"license_finder","version":"7.2.1","platform":"ruby","checksum":"179ead19b64b170638b72fd16024233813673ac9d20d5ba75ae0b4444887ef14"},
{"name":"licensee","version":"9.17.1","platform":"ruby","checksum":"0be022c66d8853d35b08171a0f2575d5ccb5aef8a7020a1815938b4f26f4089a"},
{"name":"licensee","version":"9.18.0","platform":"ruby","checksum":"3e83db984fb7e4e51c98fea0e434138dcb6112f8c26dc7693734a4f8df99df77"},
{"name":"listen","version":"3.7.1","platform":"ruby","checksum":"3b80caa7aa77fae836916c2f9e3fbcafbd15f5d695dd487c1f5b5e7e465efe29"},
{"name":"llhttp-ffi","version":"0.4.0","platform":"ruby","checksum":"e5f7327db3cf8007e648342ef76347d6e0ae545a8402e519cca9c886eb37b001"},
{"name":"locale","version":"2.1.4","platform":"ruby","checksum":"522f9973ef3eee64aac9bca06d21db2fba675fa3d2cf61d21f42d1ca18a9f780"},
@@ -614,7 +614,7 @@
{"name":"rest-client","version":"2.1.0","platform":"x86-mingw32","checksum":"fb543caf36cb555c05c6186aeb1273c6a1b059896e4cfd394e7269b20c40ca01"},
{"name":"rest-client","version":"2.1.0","platform":"x86-mswin32","checksum":"a35a3bb8d16ca39d110a946a2c805267f98ce07a0ae890e4512a45eadea47a6e"},
{"name":"retriable","version":"3.1.2","platform":"ruby","checksum":"0a5a5d0ca4ba61a76fb31a17ab8f7f80281beb040c329d34dfc137a1398688e0"},
{"name":"reverse_markdown","version":"2.1.1","platform":"ruby","checksum":"b2206466b682ac1177b6b8ec321d00a84fca02d096c5d676a7a0cc5838dc0701"},
{"name":"reverse_markdown","version":"3.0.0","platform":"ruby","checksum":"ab228386765a0259835873cd07054b62939c40f620c77c247eafaaa3b23faca4"},
{"name":"rexml","version":"3.4.1","platform":"ruby","checksum":"c74527a9a0a04b4ec31dbe0dc4ed6004b960af943d8db42e539edde3a871abca"},
{"name":"rinku","version":"2.0.0","platform":"ruby","checksum":"3e695aaf9f24baba3af45823b5c427b58a624582132f18482320e2737f9f8a85"},
{"name":"rotp","version":"6.3.0","platform":"ruby","checksum":"75d40087e65ed0d8022c33055a6306c1c400d1c12261932533b5d6cbcd868854"},
@@ -712,7 +712,7 @@
{"name":"slack-messenger","version":"2.3.6","platform":"ruby","checksum":"58581e587debcbb769336cc7ebe4eb6ae411947fccf347e967a17ac9813e66d8"},
{"name":"snaky_hash","version":"2.0.0","platform":"ruby","checksum":"fe8b2e39e8ff69320f7812af73ea06401579e29ff1734a7009567391600687de"},
{"name":"snowplow-tracker","version":"0.8.0","platform":"ruby","checksum":"7ba6f4f1443a829845fd28e63eda72d9d3d247f485310ddcccaebbc52b734a38"},
{"name":"solargraph","version":"0.54.0","platform":"ruby","checksum":"3fd13f381e6428b3c593716ee6d3d5aa802ac647b8b13692d760ab7ac3e1fdc1"},
{"name":"solargraph","version":"0.54.2","platform":"ruby","checksum":"fe22f56ec2efe64f674b0e9dd3ac8a99df5b5833c2ca84993bdb2af2bb0b6c56"},
{"name":"solargraph-rspec","version":"0.5.1","platform":"ruby","checksum":"0dfc9124f17b23e95c30acb82c1f799c865408a56b17099b2d6d7b23a76bface"},
{"name":"sorbet-runtime","version":"0.5.11647","platform":"ruby","checksum":"64b65112f2e6a5323310ca9ac0d7d9a6be63aade5a62a6225fe066042ff4fdb6"},
{"name":"spamcheck","version":"1.3.3","platform":"ruby","checksum":"3a29ba9dfcd59543d88054d38c657f79e0a6cf44d763df08ad47680abed50ec7"},

View File

@@ -1134,10 +1134,10 @@ GEM
tomlrb (>= 1.3, < 2.1)
with_env (= 1.1.0)
xml-simple (~> 1.1.9)
licensee (9.17.1)
licensee (9.18.0)
dotenv (>= 2, < 4)
octokit (>= 4.20, < 10.0)
reverse_markdown (>= 1, < 3)
reverse_markdown (>= 1, < 4)
rugged (>= 0.24, < 2.0)
thor (>= 0.19, < 2.0)
listen (3.7.1)
@@ -1652,7 +1652,7 @@ GEM
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
retriable (3.1.2)
reverse_markdown (2.1.1)
reverse_markdown (3.0.0)
nokogiri
rexml (3.4.1)
rinku (2.0.0)
@@ -1832,9 +1832,9 @@ GEM
hashie
version_gem (~> 1.1)
snowplow-tracker (0.8.0)
solargraph (0.54.0)
solargraph (0.54.2)
backport (~> 1.2)
benchmark
benchmark (~> 0.4)
bundler (~> 2.0)
diff-lcs (~> 1.4)
jaro_winkler (~> 1.6)
@@ -1845,7 +1845,7 @@ GEM
ostruct (~> 0.6)
parser (~> 3.0)
rbs (~> 3.3)
reverse_markdown (>= 2.0, < 4)
reverse_markdown (~> 3.0)
rubocop (~> 1.38)
thor (~> 1.0)
tilt (~> 2.0)

View File

@@ -11,8 +11,15 @@ export function setReviewersForList({ issuableId, listId, reviewers = [] } = {})
export function getReviewersForList({ issuableId, listId } = {}) {
const id = cacheId({ issuableId, listId });
const list = window.sessionStorage.getItem(id);
let parsedList;
return list ? JSON.parse(list) : [];
try {
parsedList = list ? JSON.parse(list) : [];
} catch {
parsedList = [];
}
return parsedList;
}
export function suggestedPosition({ username, list = [] } = {}) {

View File

@@ -58,7 +58,7 @@ export default {
<template>
<div class="gl-inline-flex gl-items-center">
<gl-icon v-if="icon" :name="icon" class="gl-mr-3 gl-min-w-5" variant="subtle" />
<gl-icon v-if="icon" :name="icon" class="gl-mr-2 gl-min-w-5" variant="subtle" />
<tooltip-on-truncate v-if="link" :title="text" class="gl-truncate" :class="sizeClass">
<gl-link :href="link">
{{ text }}

View File

@@ -84,7 +84,7 @@ export default {
<div
v-for="(row, metadataIndex) in metadataSlots"
:key="metadataIndex"
class="gl-mr-5 gl-flex gl-items-center"
class="gl-mr-3 gl-flex gl-items-center"
>
<slot :name="row"></slot>
</div>

View File

@@ -24,7 +24,7 @@ module EventForward
payload = Gitlab::Json.parse(request.raw_post)
events_to_forward = payload['data'].select do |event|
event_eligibility_checker.eligible?(event['se_ac'])
event_eligibility_checker.eligible?(event['se_ac'], event['aid'])
end
events_to_forward.each do |event|

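For context, a minimal sketch of a checker shape compatible with the new two-argument call. The class body below is illustrative only (hypothetical allow-lists); the real eligibility rules are not shown in this diff:

class EventEligibilityChecker
  # Hypothetical allow-lists for illustration; the actual checker's
  # rules live elsewhere in the codebase.
  ALLOWED_EVENTS = %w[some_event].freeze
  ALLOWED_APP_IDS = %w[some_app_id].freeze

  # The controller now passes the event name ('se_ac') and the
  # tracker application id ('aid') from each Snowplow event.
  def eligible?(event_name, app_id = nil)
    ALLOWED_EVENTS.include?(event_name) &&
      (app_id.nil? || ALLOWED_APP_IDS.include?(app_id))
  end
end
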
View File

@@ -310,21 +310,51 @@ class IssuableFinder
end
end
# rubocop: disable CodeReuse/ActiveRecord
def by_parent(items)
# When finding issues for multiple projects it's more efficient
# to use a JOIN instead of running a sub-query
# See https://gitlab.com/gitlab-org/gitlab/-/commit/8591cc02be6b12ed60f763a5e0147f2cbbca99e1
if params.projects.is_a?(ActiveRecord::Relation)
items.merge(params.projects.reorder(nil)).join_project
elsif params.projects
items.of_projects(params.projects).references_project
return items.none unless params.projects
if use_namespace_filtering?
filter_by_namespace(items)
else
items.none
filter_by_project(items)
end
end
# rubocop: disable CodeReuse/ActiveRecord
def filter_by_namespace(items)
if use_join_strategy_for_project?
# When finding issues for multiple projects it's more efficient
# to use a JOIN instead of running a sub-query
# See https://gitlab.com/gitlab-org/gitlab/-/commit/8591cc02be6b12ed60f763a5e0147f2cbbca99e1
items.join_project_through_namespace.merge(params.projects.reorder(nil))
else
items.in_namespaces(params.projects.map(&:project_namespace_id)).references_project
end
end
def filter_by_project(items)
if use_join_strategy_for_project?
# When finding issues for multiple projects it's more efficient
# to use a JOIN instead of running a sub-query
# See https://gitlab.com/gitlab-org/gitlab/-/commit/8591cc02be6b12ed60f763a5e0147f2cbbca99e1
items.merge(params.projects.reorder(nil)).join_project
else
items.of_projects(params.projects).references_project
end
end
# rubocop: enable CodeReuse/ActiveRecord
def use_namespace_filtering?
::Feature.enabled?(:use_namespace_id_for_issue_and_work_item_finders, current_user, type: :wip) &&
[::Issue, ::WorkItem].include?(klass)
end
def use_join_strategy_for_project?
strong_memoize(:use_join_strategy_for_project) do
params.projects.is_a?(ActiveRecord::Relation)
end
end
# rubocop: disable CodeReuse/ActiveRecord
def by_search(items)
return items unless search
@@ -491,7 +521,17 @@
end
def by_non_archived(items)
params[:non_archived].present? ? items.non_archived : items
if params[:non_archived].present?
if use_namespace_filtering?
# If use_join_strategy_for_project? is true, items has been joined onto project already, and we don't need to
# perform the join again
items.non_archived(use_existing_join: use_join_strategy_for_project?)
else
items.non_archived
end
else
items
end
end
def by_crm_contact(items)

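A hedged sketch of the resulting dispatch (the finder class and SQL shapes below are illustrative; the flag and scopes are the ones shown in the diff above):

# With :use_namespace_id_for_issue_and_work_item_finders enabled and
# klass being Issue or WorkItem, by_parent filters through the project
# namespace instead of project_id:
#
#   - params.projects as an ActiveRecord::Relation (join strategy):
#       JOIN projects ON projects.project_namespace_id = issues.namespace_id
#   - otherwise: issues.namespace_id IN (the projects' project_namespace_ids)
#
# Callers are unchanged, e.g.:
issues = IssuesFinder.new(current_user, project_id: project.id).execute
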
View File

@@ -502,10 +502,13 @@ module Ci
# sha - The commit SHA (or multiple SHAs) to limit the list of pipelines to.
# limit - Number of pipelines to return. Chaining with sampling methods (#pick, #take)
# will cause unnecessary subqueries.
def self.newest_first(ref: nil, sha: nil, limit: nil)
def self.newest_first(ref: nil, sha: nil, limit: nil, source: nil)
relation = order(id: :desc)
relation = relation.where(ref: ref) if ref
relation = relation.where(sha: sha) if sha
if source && Feature.enabled?(:source_filter_pipelines, :current_request)
relation = relation.where(source: source)
end
if limit
ids = relation.limit(limit).select(:id)

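A usage sketch for the extended scope (ref/sha values are illustrative; the source filter only takes effect while the source_filter_pipelines flag is enabled for the current request):

# Newest push pipelines on a ref, capped at 5:
Ci::Pipeline.newest_first(ref: 'master', source: :push, limit: 5)

# With the flag disabled, :source is ignored and all sources match:
Ci::Pipeline.newest_first(ref: 'master', sha: '123abc')
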
View File

@@ -152,7 +152,14 @@ class Issue < ApplicationRecord
scope :in_projects, ->(project_ids) { where(project_id: project_ids) }
scope :not_in_projects, ->(project_ids) { where.not(project_id: project_ids) }
scope :non_archived, -> { left_joins(:project).where(project_id: nil).or(where(projects: { archived: false })) }
scope :join_project_through_namespace, -> do
joins("JOIN projects ON projects.project_namespace_id = issues.namespace_id")
end
scope :non_archived, ->(use_existing_join: false) do
relation = use_existing_join ? self : left_joins(:project)
relation.where(project_id: nil).or(relation.where(projects: { archived: false }))
end
scope :with_due_date, -> { where.not(due_date: nil) }
scope :without_due_date, -> { where(due_date: nil) }

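A sketch of how the two new pieces compose, mirroring the finder change above (the chaining shown is illustrative):

# Join projects through the issue's namespace once, then filter archived
# projects reusing that same join:
Issue.join_project_through_namespace.non_archived(use_existing_join: true)

# The default behavior is unchanged and still adds its own LEFT JOIN:
Issue.non_archived
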
View File

@@ -703,7 +703,7 @@ class MergeRequest < ApplicationRecord
def merge_pipeline
if sha = merged_commit_sha
target_project.latest_pipeline(target_branch, sha)
target_project.latest_pipeline(target_branch, sha, :push)
end
end

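Read together with the Ci::Pipeline change above: when the source_filter_pipelines flag is on, this lookup now only matches push pipelines at the merged commit. A hedged illustration:

merge_request.mark_as_merged!
merge_request.merge_pipeline
# => latest :push pipeline on target_branch at merged_commit_sha, or nil;
#    a :schedule pipeline on the same SHA no longer matches.
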
View File

@@ -1516,16 +1516,16 @@ class Project < ApplicationRecord
latest_successful_build_for_ref(job_name, ref) || raise(ActiveRecord::RecordNotFound, "Couldn't find job #{job_name}")
end
def latest_pipelines(ref: default_branch, sha: nil, limit: nil)
def latest_pipelines(ref: default_branch, sha: nil, limit: nil, source: nil)
ref = ref.presence || default_branch
sha ||= commit(ref)&.sha
return ci_pipelines.none unless sha
ci_pipelines.newest_first(ref: ref, sha: sha, limit: limit)
ci_pipelines.newest_first(ref: ref, sha: sha, limit: limit, source: source)
end
def latest_pipeline(ref = default_branch, sha = nil)
latest_pipelines(ref: ref, sha: sha).take
def latest_pipeline(ref = default_branch, sha = nil, source = nil)
latest_pipelines(ref: ref, sha: sha, source: source).take
end
def merge_base_commit(first_commit_id, second_commit_id)

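A usage sketch for the new optional source argument (positional on latest_pipeline, keyword on latest_pipelines, per the signatures above; the ref values are illustrative):

project.latest_pipeline('main', nil, :push)               # newest push pipeline on main
project.latest_pipelines(ref: 'main', source: :schedule)  # schedule pipelines, newest first
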
View File

@@ -167,6 +167,7 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
enable :read_confidential_issues
enable :read_crm_organization
enable :read_crm_contact
enable :read_internal_note
end
rule { admin | organization_owner }.policy do
@@ -212,11 +213,6 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
enable :read_achievement
end
rule { can?(:maintainer_access) }.policy do
enable :admin_achievement
enable :award_achievement
end
rule { can?(:owner_access) }.policy do
enable :destroy_user_achievement
enable :set_issue_created_at
@@ -264,62 +260,62 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
enable :read_crm_contact
enable :read_confidential_issues
enable :read_ci_cd_analytics
enable :read_internal_note
end
rule { maintainer }.policy do
enable :destroy_package
enable :import_projects
enable :admin_pipeline
enable :admin_build
enable :maintainer_access
enable :add_cluster
enable :create_cluster
enable :update_cluster
enable :admin_achievement
enable :admin_build
enable :admin_cluster
enable :admin_pipeline
enable :admin_push_rules
enable :admin_upload
enable :award_achievement
enable :create_cluster
enable :create_jira_connect_subscription
enable :destroy_package
enable :destroy_upload
enable :import_projects
enable :read_deploy_token
enable :read_group_runners
enable :create_jira_connect_subscription
enable :maintainer_access
enable :admin_upload
enable :destroy_upload
enable :admin_push_rules
enable :update_cluster
end
rule { owner }.policy do
enable :admin_group
enable :admin_namespace
enable :admin_group_member
enable :admin_package
enable :admin_runner
enable :admin_integrations
enable :admin_protected_environments
enable :change_visibility_level
enable :read_usage_quotas
enable :read_group_runners
enable :register_group_runners
enable :create_runner
enable :destroy_issue
enable :set_note_created_at
enable :set_emails_disabled
enable :change_prevent_sharing_groups_outside_hierarchy
enable :set_show_diff_preview_in_email
enable :change_seat_control
enable :change_new_user_signups_cap
enable :update_default_branch_protection
enable :create_deploy_token
enable :destroy_deploy_token
enable :read_runners_registration_token
enable :update_runners_registration_token
enable :owner_access
enable :update_git_access_protocol
enable :admin_cicd_variables
enable :read_billing
enable :admin_group
enable :admin_group_member
enable :admin_integrations
enable :admin_namespace
enable :admin_package
enable :admin_protected_environments
enable :admin_runner
enable :change_new_user_signups_cap
enable :change_prevent_sharing_groups_outside_hierarchy
enable :change_seat_control
enable :change_visibility_level
enable :create_deploy_token
enable :create_runner
enable :create_subgroup
enable :destroy_deploy_token
enable :destroy_issue
enable :edit_billing
enable :remove_group
enable :manage_merge_request_settings
enable :read_billing
enable :read_group_runners
enable :read_runners_registration_token
enable :read_usage_quotas
enable :register_group_runners
enable :remove_group
enable :set_emails_disabled
enable :set_note_created_at
enable :set_show_diff_preview_in_email
enable :update_default_branch_protection
enable :update_git_access_protocol
enable :update_runners_registration_token
end
rule { can?(:read_nested_project_resources) }.policy do
@@ -336,7 +332,6 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
enable :read_nested_project_resources
end
rule { owner }.enable :create_subgroup
rule { maintainer & maintainer_can_create_group }.enable :create_subgroup
rule { public_group | logged_in_viewable }.enable :view_globally
@@ -450,9 +445,6 @@ class GroupPolicy < Namespaces::GroupProjectNamespaceSharedPolicy
rule { can?(:admin_group) | can?(:admin_runner) }.enable :admin_group_or_admin_runner
# Should be matched with ProjectPolicy#read_internal_note
rule { admin | reporter | planner }.enable :read_internal_note
rule { can?(:remove_group) }.enable :view_edit_page
# TODO: Remove this rule and move :read_package permission from reporter to guest

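These rules are evaluated through DeclarativePolicy; a minimal check sketch (Ability.allowed? is the standard GitLab entry point, while the user and group variables here are illustrative):

# :admin_achievement and :award_achievement now come from the maintainer
# rule, and :read_internal_note is granted inline in the planner and
# reporter rules (matching ProjectPolicy) rather than via a separate
# shared rule.
Ability.allowed?(maintainer_user, :admin_achievement, group)  # => true
Ability.allowed?(guest_user, :read_internal_note, group)      # => false
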
View File

@@ -414,6 +414,7 @@ class ProjectPolicy < BasePolicy
enable :download_wiki_code
enable :create_wiki
enable :admin_wiki
enable :read_internal_note
enable :read_merge_request
enable :export_work_items
end
@@ -464,6 +465,7 @@ class ProjectPolicy < BasePolicy
enable :read_package
enable :read_ci_cd_analytics
enable :read_external_emails
enable :read_internal_note
enable :read_grafana
enable :export_work_items
enable :create_design
@@ -1198,9 +1200,6 @@ class ProjectPolicy < BasePolicy
enable :read_code
end
# Should be matched with GroupPolicy#read_internal_note
rule { admin | planner_or_reporter_access }.enable :read_internal_note
rule { can?(:developer_access) & namespace_catalog_available }.policy do
enable :read_namespace_catalog
end

View File

@@ -3,6 +3,8 @@
module Ci
module ResourceGroups
class AssignResourceFromResourceGroupService < ::BaseService
include Gitlab::InternalEventsTracking
RESPAWN_WAIT_TIME = 1.minute
def execute(resource_group)
@@ -25,6 +27,17 @@ module Ci
processable.drop!(:failed_outdated_deployment_job)
else
processable.enqueue_waiting_for_resource
track_internal_event(
"job_enqueued_by_resource_group",
user: processable.user,
project: resource_group.project,
additional_properties: {
label: resource_group.process_mode,
property: processable.id.to_s,
resource_group_id: resource_group.id
}
)
end
end
end

View File

@@ -0,0 +1,23 @@
---
description: Job enqueued by resource group
internal_events: true
action: job_enqueued_by_resource_group
identifiers:
- project
- namespace
additional_properties:
label:
description: process_mode of the resource group
property:
description: id of the job which was enqueued
resource_group_id:
description: id of the resource group
product_group: environments
product_categories:
- continuous_delivery
milestone: '18.0'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/189859
tiers:
- free
- premium
- ultimate

View File

@@ -0,0 +1,10 @@
---
name: source_filter_pipelines
description:
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/418120
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/189704
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/540892
milestone: '18.0'
group: group::pipeline execution
type: gitlab_com_derisk
default_enabled: false

View File

@@ -0,0 +1,10 @@
---
name: use_namespace_id_for_issue_and_work_item_finders
description:
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/536351
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/188961
rollout_issue_url:
milestone: '18.0'
group: group::product planning
type: wip
default_enabled: false

View File

@@ -0,0 +1,23 @@
---
key_path: redis_hll_counters.count_distinct_label_from_job_enqueued_by_resource_group
description: Count of unique jobs enqueued by process mode
product_group: environments
product_categories:
- continuous_delivery
performance_indicator_type: []
value_type: number
status: active
milestone: '18.0'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/189859
time_frame:
- 28d
- 7d
data_source: internal_events
data_category: optional
tiers:
- free
- premium
- ultimate
events:
- name: job_enqueued_by_resource_group
unique: label

View File

@@ -0,0 +1,23 @@
---
key_path: redis_hll_counters.count_distinct_project_id_from_job_enqueued_by_resource_group
description: Count of unique projects that use resource groups
product_group: environments
product_categories:
- continuous_delivery
performance_indicator_type: []
value_type: number
status: active
milestone: '18.0'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/189859
time_frame:
- 28d
- 7d
data_source: internal_events
data_category: optional
tiers:
- free
- premium
- ultimate
events:
- name: job_enqueued_by_resource_group
unique: project.id

View File

@@ -12,7 +12,7 @@ title: Gitaly and Gitaly Cluster
{{< /details >}}
[Gitaly](https://gitlab.com/gitlab-org/gitaly) provides high-level RPC access to Git repositories.
[Gitaly](https://gitlab.com/gitlab-org/gitaly) provides high-level remote procedure call (RPC) access to Git repositories.
It is used by GitLab to read and write Git data.
Gitaly is present in every GitLab installation and coordinates Git repository

View File

@@ -41,11 +41,21 @@ to start using GitLab Duo in your IDEs. No further action is needed.
Users assigned the [Guest role](../administration/guest_users.md) do not have
access to GitLab Duo Core.
{{< alert type="note" >}}
### GitLab Duo Core limits
Your eligibility for GitLab Duo Core may be subject to rate limits.
Usage limits, along with [the GitLab Terms of Service](https://about.gitlab.com/terms/),
apply to Premium and Ultimate customers' use of the included Code Suggestions and GitLab Duo Chat features.
{{< /alert >}}
GitLab will provide 30 days prior notice before enforcement of these limits take effect.
At that time, organization administrators will have tools to monitor and manage consumption and will be able
to purchase additional capacity.
| Feature | Requests per user per month |
|------------------|-----------------------------|
| Code Suggestions | 2,000 |
| GitLab Duo Chat | 100 |
Limits do not apply to GitLab Duo Pro or Enterprise.
## GitLab Duo Pro and Enterprise

View File

@@ -199,16 +199,16 @@ behavior of projects that are assigned to a compliance framework.
| Control name | Control ID | Description |
|:---------------------------------------------------------|:-----------------------------------------------------------|:------------|
| API security running | `scanner_api_security_running` | Ensures that [API security scanning](../application_security/api_security/_index.md) is configured and running in the project pipelines. |
| At least one approval | `minimum_approvals_required_1` | Ensures that merge requests [require at least one approvals](../project/merge_requests/approvals/_index.md) before merging. |
| At least one approval | `minimum_approvals_required_1` | Ensures that merge requests [require at least one approvals](../project/merge_requests/approvals/_index.md) before merging. |
| At least two approvals | `minimum_approvals_required_2` | Ensures that merge requests [require at least two approvals](../project/merge_requests/approvals/_index.md) before merging. |
| Auth SSO enabled | `auth_sso_enabled` | Ensures that [Single Sign-On (SSO) authentication](../group/saml_sso/_index.md) is enabled for the project. |
| Author approved merge request | `merge_request_prevent_author_approval` | Ensures that the author of a merge request [cannot approve their own changes](../project/merge_requests/approvals/_index.md). |
| Author approved merge request is forbidden | `merge_request_prevent_author_approval` | Ensures that the author of a merge request [cannot approve their own changes](../project/merge_requests/approvals/_index.md). |
| Branch deletion disabled | `branch_deletion_disabled` | Ensures that [branches can't be deleted](../project/repository/branches/protected.md). |
| CI/CD job token scope enabled | `cicd_job_token_scope_enabled` | Ensures that [CI/CD job token](../../ci/jobs/ci_job_token.md) scope restrictions are enabled. |
| Code changes requires code owners | `code_changes_requires_code_owners` | Ensures that code changes require approval from [code owners](../project/codeowners/_index.md). |
| Code owner approval required | `code_owner_approval_required` | Ensures that [code owners file](../project/codeowners/_index.md) is configured. |
| Code quality running | `scanner_code_quality_running` | Ensures that [code quality scanning](../../ci/testing/code_quality.md) is configured and running in the project pipelines. |
| Committers approved merge request | `merge_request_prevent_committers_approval` | Ensures that users who have [committed to a merge request cannot approve it](../project/merge_requests/approvals/_index.md). |
| Committers approved merge request is forbidden | `merge_request_prevent_committers_approval` | Ensures that users who have [committed to a merge request cannot approve it](../project/merge_requests/approvals/_index.md). |
| Container scanning running | `scanner_container_scanning_running` | Ensures that [container scanning](../application_security/container_scanning/_index.md) is configured and running in the project pipelines. |
| DAST running | `scanner_dast_running` | Ensures that [Dynamic Application Security Testing](../application_security/dast/_index.md) (DAST) is configured and running in the project pipelines. |
| Default branch protected | `default_branch_protected` | Ensures that the default branch has [protection rules](../project/repository/branches/protected.md) enabled. |
@@ -232,17 +232,17 @@ behavior of projects that are assigned to a compliance framework.
| Merge requests require code owner approval | `merge_requests_require_code_owner_approval` | Ensures that merge requests require approval from [code owners](../project/codeowners/_index.md). |
| More members than admins | `more_members_than_admins` | Ensures fewer [administrators](../project/members/_index.md) are assigned to the project than total members. |
| Package Hunter no findings untriaged | `package_hunter_no_findings_untriaged` | Ensures that all [Package Hunter](../application_security/triage/_index.md) findings are triaged. |
| Project archived | `project_archived` | Checks whether the [project is archived](../project/settings/_index.md). Typically `false` is compliant. |
| Project marked for deletion | `project_marked_for_deletion` | Checks whether the [project is marked for deletion](../project/settings/_index.md). `false` is compliant. |
| Project not archived | `project_archived` | Checks whether the [project is archived](../project/settings/_index.md). Typically `false` is compliant. |
| Project not marked for deletion | `project_marked_for_deletion` | Checks whether the [project is marked for deletion](../project/settings/_index.md). `false` is compliant. |
| Project pipelines not public | `project_pipelines_not_public` | Ensures that [project pipelines are not publicly visible](../../ci/pipelines/settings.md). |
| Project repository exists | `project_repo_exists` | Ensures that a [Git repository](../../topics/git/_index.md) exists for the project. |
| Project repository exists | `project_repo_exists` | Ensures that a [Git repository](../../topics/git/_index.md) exists for the project. |
| Project user defined variables restricted to maintainers | `project_user_defined_variables_restricted_to_maintainers` | Ensures that only maintainers can create [project variables](../../ci/variables/_index.md). |
| Project visibility not public | `project_visibility_not_public` | Ensures that projects are not set to [public visibility](../public_access.md). |
| Protected branches exist | `protected_branches_set` | Ensures that project contains [protected branches](../project/repository/branches/protected.md). |
| Push protection enabled | `push_protection_enabled` | Ensures that [push protection](../project/repository/push_rules.md) is enabled for sensitive files. |
| Require branch up to date | `require_branch_up_to_date` | Ensures that the [source branch is up to date with the target branch before merging](../project/merge_requests/methods/_index.md). |
| Require linear history | `require_linear_history` | Ensures a [linear commit history](../project/merge_requests/methods/_index.md#fast-forward-merge) by forbidding merge commits. |
| Require MFA at organization level | `require_mfa_at_org_level` | Ensures that [multi-factor authentication](../profile/account/two_factor_authentication.md) is required at the organization level. |
| Require MFA at organization level | `require_mfa_at_org_level` | Ensures that [multi-factor authentication](../profile/account/two_factor_authentication.md) is required at the organization level. |
| Require MFA for contributors | `require_mfa_for_contributors` | Ensures that [contributors have multi-factor authentication enabled](../profile/account/two_factor_authentication.md). |
| Requires signed commits | `require_signed_commits` | Ensures that [signed commits](../project/repository/signed_commits) are required. |
| Reset approvals on push | `reset_approvals_on_push` | Ensures that [approvals are reset when new commits are pushed](../project/merge_requests/approvals/settings.md) to the merge request. |

View File

@@ -701,7 +701,7 @@ Use the commands to quickly accomplish specific tasks.
{{< details >}}
- Tier: Premium, Ultimate
- Add-on: GitLab Duo Core, Pro or Enterprise
- Add-on: GitLab Duo Core, Pro, or Enterprise
- Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated
- Editors: GitLab UI, Web IDE, VS Code, JetBrains IDEs

View File

@@ -15942,12 +15942,6 @@ msgstr ""
msgid "ComplianceStandardsAdherence|Checks"
msgstr ""
msgid "ComplianceStandardsAdherence|Checks if project is archived (typically false is compliant)."
msgstr ""
msgid "ComplianceStandardsAdherence|Checks if project is marked for deletion (false is compliant)."
msgstr ""
msgid "ComplianceStandardsAdherence|Configure DAST in your CI/CD pipeline to automatically test your application for security issues."
msgstr ""
@@ -16281,6 +16275,12 @@ msgstr ""
msgid "ComplianceStandardsAdherence|Ensures that project contains protected branches."
msgstr ""
msgid "ComplianceStandardsAdherence|Ensures that project is not archived."
msgstr ""
msgid "ComplianceStandardsAdherence|Ensures that project is not marked for deletion."
msgstr ""
msgid "ComplianceStandardsAdherence|Ensures that signed commits are required"
msgstr ""
@@ -66901,12 +66901,52 @@ msgstr ""
msgid "VirtualRegistries|Virtual Registries"
msgstr ""
msgid "VirtualRegistry|%{count} artifact"
msgid_plural "VirtualRegistry|%{count} artifacts"
msgstr[0] ""
msgstr[1] ""
msgid "VirtualRegistry|%{hours} hour cache"
msgid_plural "VirtualRegistry|%{hours} hours cache"
msgstr[0] ""
msgstr[1] ""
msgid "VirtualRegistry|%{size} storage used"
msgstr ""
msgid "VirtualRegistry|Add upstream"
msgstr ""
msgid "VirtualRegistry|Cache: %{size}"
msgstr ""
msgid "VirtualRegistry|Caching period"
msgstr ""
msgid "VirtualRegistry|Clear cache"
msgstr ""
msgid "VirtualRegistry|Create Maven registry"
msgstr ""
msgid "VirtualRegistry|Create registry"
msgstr ""
msgid "VirtualRegistry|Create upstream"
msgstr ""
msgid "VirtualRegistry|Delete upstream"
msgstr ""
msgid "VirtualRegistry|Description (optional)"
msgstr ""
msgid "VirtualRegistry|Edit upstream"
msgstr ""
msgid "VirtualRegistry|Enter password"
msgstr ""
msgid "VirtualRegistry|Failed to fetch list of maven virtual registries."
msgstr ""
@@ -66922,24 +66962,54 @@ msgstr ""
msgid "VirtualRegistry|Maven virtual registry"
msgstr ""
msgid "VirtualRegistry|Move upstream down"
msgstr ""
msgid "VirtualRegistry|Move upstream up"
msgstr ""
msgid "VirtualRegistry|Name"
msgstr ""
msgid "VirtualRegistry|New maven virtual registry"
msgstr ""
msgid "VirtualRegistry|No upstreams yet"
msgstr ""
msgid "VirtualRegistry|Password (optional)"
msgstr ""
msgid "VirtualRegistry|Registry types"
msgstr ""
msgid "VirtualRegistry|Test upstream"
msgstr ""
msgid "VirtualRegistry|There are no maven virtual registries yet"
msgstr ""
msgid "VirtualRegistry|There is a problem with this cached upstream"
msgstr ""
msgid "VirtualRegistry|Time in hours"
msgstr ""
msgid "VirtualRegistry|Updated %{date}"
msgstr ""
msgid "VirtualRegistry|Upstream URL"
msgstr ""
msgid "VirtualRegistry|Upstreams"
msgstr ""
msgid "VirtualRegistry|Use the arrow buttons to reorder upstreams. Artifacts are resolved from top to bottom."
msgstr ""
msgid "VirtualRegistry|Username (optional)"
msgstr ""
msgid "VirtualRegistry|Virtual registries"
msgstr ""

View File

@@ -41,7 +41,6 @@ ee/spec/frontend/ml/ai_agents/views/edit_agent_spec.js
ee/spec/frontend/oncall_schedule/schedule/components/preset_days/days_header_sub_item_spec.js
ee/spec/frontend/pages/admin/application_settings/general/components/allowed_integrations_spec.js
ee/spec/frontend/product_analytics/onboarding/onboarding_view_spec.js
ee/spec/frontend/projects/merge_requests/blocking_mr_input_root_spec.js
ee/spec/frontend/projects/settings/branch_rules/components/view/index_spec.js
ee/spec/frontend/projects/settings/components/shared_runners_toggle_spec.js
ee/spec/frontend/related_items_tree/components/related_items_tree_body_spec.js

View File

@@ -17,6 +17,7 @@ describe('reviewer_positions utility', () => {
getItem: jest.fn().mockImplementation((key) => {
const vals = {
'MergeRequest/123/test-list-id': mockReviewersString,
'MergeRequest/123/invalid-data': '#',
};
return vals[key];
@@ -64,6 +65,15 @@
expect(result).toEqual([]);
});
it('returns an empty array when the data in storage is not a valid JSON string', () => {
const result = getReviewersForList({
issuableId: mockIssuableId,
listId: 'invalid-data',
});
expect(result).toEqual([]);
});
});
describe('suggestedPosition', () => {

View File

@@ -126,7 +126,7 @@ RSpec.describe Resolvers::Users::ParticipantsResolver do
query.call
end
it 'does not execute N+1 for project relation' do
it 'does not execute N+1 for project relation', :request_store do
control_count = ActiveRecord::QueryRecorder.new { query.call }
create(:award_emoji, :upvote, awardable: issue)

View File

@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe SnippetsHelper do
RSpec.describe SnippetsHelper, feature_category: :source_code_management do
include Gitlab::Routing
include IconsHelper
include BadgesHelper

View File

@@ -3020,6 +3020,30 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
end
end
describe '.newest_first' do
let_it_be(:merged_commit_pipeline) do
create(
:ci_pipeline,
status: 'success',
ref: 'master',
sha: '123',
source: :push
)
end
before do
stub_feature_flags(source_filter_pipelines: true)
end
it 'returns the newest pipeline by source' do
expect(described_class.newest_first(source: :push)).to contain_exactly(merged_commit_pipeline)
end
it 'returns empty when a specified source has no pipelines' do
expect(described_class.newest_first(source: :schedule)).to be_empty
end
end
describe '.latest_pipeline_per_commit' do
let!(:commit_123_ref_master) do
create(

View File

@@ -2850,6 +2850,27 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
end
context 'when the MR is merged and there is a pipeline schedule source' do
let(:sha) { subject.target_project.commit.id }
let(:pipeline) { create(:ci_empty_pipeline, sha: sha, ref: subject.target_branch, project: subject.target_project) }
let(:schedule_pipeline) { create(:ci_empty_pipeline, sha: sha, ref: subject.target_branch, project: subject.target_project, source: :schedule) }
before do
stub_feature_flags(source_filter_pipelines: true)
subject.mark_as_merged!
end
context 'and merged_commit_sha is present' do
before do
subject.update_attribute(:merged_commit_sha, pipeline.sha)
end
it 'returns the pipeline associated with that merge request' do
expect(subject.merge_pipeline).to eq(pipeline)
end
end
end
end
describe '#has_ci?' do

View File

@@ -3791,6 +3791,22 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
it { is_expected.to eq(other_pipeline_for_default_branch) }
end
context 'with provided source' do
before do
stub_feature_flags(source_filter_pipelines: true)
end
subject { project.latest_pipeline(project.default_branch, project.commit.parent.id, :push) }
it { is_expected.to eq(other_pipeline_for_default_branch) }
end
context 'with provided source that does not contain any pipelines' do
subject { project.latest_pipeline(project.default_branch, project.commit.parent.id, :schedule) }
it { is_expected.to be_nil }
end
end
context 'provided ref' do

View File

@@ -91,8 +91,8 @@ RSpec.describe EventForward::EventForwardController, feature_category: :product_
context 'when filtering events by eligibility' do
before do
allow(event_eligibility_checker).to receive(:eligible?).with("event_1").and_return(true)
allow(event_eligibility_checker).to receive(:eligible?).with("event_2").and_return(false)
allow(event_eligibility_checker).to receive(:eligible?).with("event_1", "app_id_1").and_return(true)
allow(event_eligibility_checker).to receive(:eligible?).with("event_2", "app_id_2").and_return(false)
stub_application_setting(gitlab_dedicated_instance?: true)
end

View File

@@ -28,6 +28,12 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService, featu
expect(ci_build.resource).to be_present
end
it_behaves_like 'internal event tracking' do
let(:event) { 'job_enqueued_by_resource_group' }
let(:category) { described_class.name }
let(:additional_properties) { { label: resource_group.process_mode, property: ci_build.id.to_s, resource_group_id: resource_group.id } }
end
context 'when failed to request resource' do
before do
allow_next_instance_of(Ci::Build) do |job|

View File

@@ -1 +1 @@
golang 1.23.2
golang 1.24.2

View File

@@ -7,8 +7,7 @@ internal/api/channel_settings.go:57:28: G402: TLS MinVersion too low. (gosec)
internal/channel/channel.go:128:31: response body must be closed (bodyclose)
internal/config/config.go:247:18: G204: Subprocess launched with variable (gosec)
internal/config/config.go:339:8: G101: Potential hardcoded credentials (gosec)
internal/dependencyproxy/dependencyproxy.go:127: Function 'Inject' is too long (65 > 60) (funlen)
internal/dependencyproxy/dependencyproxy_test.go:576: internal/dependencyproxy/dependencyproxy_test.go:576: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "note that the timeout duration here is s..." (godox)
internal/dependencyproxy/dependencyproxy_test.go:572: internal/dependencyproxy/dependencyproxy_test.go:572: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "note that the timeout duration here is s..." (godox)
internal/git/archive.go:67: Function 'Inject' has too many statements (55 > 40) (funlen)
internal/git/blob.go:21:5: exported: exported var SendBlob should have comment or be unexported (revive)
internal/git/diff.go:1: 1-47 lines are duplicate of `internal/git/format-patch.go:1-48` (dupl)
@@ -33,18 +32,18 @@ internal/git/upload-pack.go:37:16: Error return value of `cw.Flush` is not check
internal/git/upload-pack_test.go:72:2: error-is-as: use require.ErrorIs (testifylint)
internal/headers/headers.go:10: internal/headers/headers.go:10: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "Fixme: Go back to 512 bytes once https:/..." (godox)
internal/imageresizer/image_resizer.go:1:1: package-comments: should have a package comment (revive)
internal/imageresizer/image_resizer.go:33:6: exported: exported type Resizer should have comment or be unexported (revive)
internal/imageresizer/image_resizer.go:144:1: exported: exported function NewResizer should have comment or be unexported (revive)
internal/imageresizer/image_resizer.go:152: Function 'Inject' is too long (72 > 60) (funlen)
internal/imageresizer/image_resizer.go:182:30: Error return value of `imageFile.reader.Close` is not checked (errcheck)
internal/imageresizer/image_resizer.go:184:35: G115: integer overflow conversion uint -> int (gosec)
internal/imageresizer/image_resizer.go:205:32: Error return value of `command.KillProcessGroup` is not checked (errcheck)
internal/imageresizer/image_resizer.go:266:28: G115: integer overflow conversion uint64 -> int64 (gosec)
internal/imageresizer/image_resizer.go:274:41: G115: integer overflow conversion uint32 -> int32 (gosec)
internal/imageresizer/image_resizer.go:316:46: G115: integer overflow conversion uint -> int (gosec)
internal/imageresizer/image_resizer.go:356:17: Error return value of `res.Body.Close` is not checked (errcheck)
internal/imageresizer/image_resizer.go:362:15: G304: Potential file inclusion via variable (gosec)
internal/imageresizer/image_resizer.go:369:13: Error return value of `file.Close` is not checked (errcheck)
internal/imageresizer/image_resizer.go:30:6: exported: exported type Resizer should have comment or be unexported (revive)
internal/imageresizer/image_resizer.go:141:1: exported: exported function NewResizer should have comment or be unexported (revive)
internal/imageresizer/image_resizer.go:149: Function 'Inject' is too long (65 > 60) (funlen)
internal/imageresizer/image_resizer.go:172:30: Error return value of `imageFile.reader.Close` is not checked (errcheck)
internal/imageresizer/image_resizer.go:174:35: G115: integer overflow conversion uint -> int (gosec)
internal/imageresizer/image_resizer.go:195:32: Error return value of `command.KillProcessGroup` is not checked (errcheck)
internal/imageresizer/image_resizer.go:256:28: G115: integer overflow conversion uint64 -> int64 (gosec)
internal/imageresizer/image_resizer.go:264:41: G115: integer overflow conversion uint32 -> int32 (gosec)
internal/imageresizer/image_resizer.go:306:46: G115: integer overflow conversion uint -> int (gosec)
internal/imageresizer/image_resizer.go:350:17: Error return value of `res.Body.Close` is not checked (errcheck)
internal/imageresizer/image_resizer.go:356:15: G304: Potential file inclusion via variable (gosec)
internal/imageresizer/image_resizer.go:363:13: Error return value of `file.Close` is not checked (errcheck)
internal/imageresizer/image_resizer_caching.go:6:1: ST1000: at least one file in a package should have a package comment (stylecheck)
internal/proxy/proxy.go:142:14: SA6002: argument should be pointer-like to avoid allocations (staticcheck)
internal/senddata/contentprocessor/contentprocessor.go:136:35: response body must be closed (bodyclose)
@@ -53,9 +52,9 @@ internal/testhelper/gitaly.go:277: 277-296 lines are duplicate of `internal/test
internal/testhelper/gitaly.go:315: 315-336 lines are duplicate of `internal/testhelper/gitaly.go:338-357` (dupl)
internal/testhelper/gitaly.go:338: 338-357 lines are duplicate of `internal/testhelper/gitaly.go:277-296` (dupl)
internal/testhelper/testhelper.go:18:2: import 'github.com/dlclark/regexp2' is not allowed from list 'main' (depguard)
internal/testhelper/testhelper.go:245:21: G302: Expect file permissions to be 0600 or less (gosec)
internal/testhelper/testhelper.go:258:39: G115: integer overflow conversion uintptr -> int (gosec)
internal/testhelper/testhelper.go:272:39: G115: integer overflow conversion uintptr -> int (gosec)
internal/testhelper/testhelper.go:243:21: G302: Expect file permissions to be 0600 or less (gosec)
internal/testhelper/testhelper.go:256:39: G115: integer overflow conversion uintptr -> int (gosec)
internal/testhelper/testhelper.go:270:39: G115: integer overflow conversion uintptr -> int (gosec)
internal/transport/transport.go:144:1: cognitive complexity 38 of func `validateIPAddress` is high (> 20) (gocognit)
internal/upload/artifacts_upload_test.go:49:1: cognitive complexity 32 of func `testArtifactsUploadServer` is high (> 20) (gocognit)
internal/upload/artifacts_uploader.go:82:11: G204: Subprocess launched with a potential tainted input or cmd arguments (gosec)
@@ -69,9 +68,9 @@ internal/upload/destination/objectstore/upload_strategy.go:29: internal/upload/d
internal/upload/destination/objectstore/uploader.go:5:2: G501: Blocklisted import crypto/md5: weak cryptographic primitive (gosec)
internal/upload/destination/objectstore/uploader.go:95:12: G401: Use of weak cryptographic primitive (gosec)
internal/upload/exif/exif.go:103:10: G204: Subprocess launched with variable (gosec)
internal/upstream/routes.go:179:74: `(*upstream).wsRoute` - `matchers` always receives `nil` (unparam)
internal/upstream/routes.go:239: Function 'configureRoutes' is too long (342 > 60) (funlen)
internal/upstream/routes.go:497: internal/upstream/routes.go:497: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "TODO: We should probably not return a HT..." (godox)
internal/upstream/routes.go:172:74: `(*upstream).wsRoute` - `matchers` always receives `nil` (unparam)
internal/upstream/routes.go:232: Function 'configureRoutes' is too long (342 > 60) (funlen)
internal/upstream/routes.go:490: internal/upstream/routes.go:490: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "TODO: We should probably not return a HT..." (godox)
internal/upstream/upstream.go:116: internal/upstream/upstream.go:116: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "TODO: move to LabKit https://gitlab.com/..." (godox)
internal/zipartifacts/metadata.go:118:54: G115: integer overflow conversion int -> uint32 (gosec)
internal/zipartifacts/open_archive.go:74:28: response body must be closed (bodyclose)
internal/zipartifacts/open_archive.go:78:28: response body must be closed (bodyclose)

View File

@@ -11,17 +11,12 @@ import (
"os"
"os/exec"
"path/filepath"
"strconv"
"strings"
"syscall"
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/labkit/mask"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper/command"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper/fail"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/senddata"
@@ -42,13 +37,6 @@ func (e *entry) Inject(w http.ResponseWriter, r *http.Request, sendData string)
return
}
if helper.IsURL(params.Archive) {
// Get the tracker from context and set flags
if tracker, ok := metrics.FromContext(r.Context()); ok {
tracker.SetFlag(metrics.KeyFetchedExternalURL, strconv.FormatBool(true))
}
}
log.WithContextFields(r.Context(), log.Fields{
"entry": params.Entry,
"archive": mask.URL(params.Archive),

View File

@@ -13,8 +13,6 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
)
@@ -26,13 +24,8 @@ func testEntryServer(t *testing.T, archive string, entry string) *httptest.Respo
encodedEntry := base64.StdEncoding.EncodeToString([]byte(entry))
jsonParams := fmt.Sprintf(`{"Archive":"%s","Entry":"%s"}`, archive, encodedEntry)
data := base64.URLEncoding.EncodeToString([]byte(jsonParams))
if helper.IsURL(archive) {
r = testhelper.RequestWithMetrics(t, r)
}
SendEntry.Inject(w, r, data)
if helper.IsURL(archive) {
testhelper.AssertMetrics(t, r)
}
})
httpRequest, err := http.NewRequest("GET", "/url/path", nil)

View File

@@ -9,15 +9,12 @@ import (
"net/http"
"net/url"
"os"
"strconv"
"strings"
"sync"
"time"
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/forwardheaders"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper/fail"
@@ -125,11 +122,6 @@ func (p *Injector) SetUploadHandler(uploadHandler upload.BodyUploadHandler) {
// Inject performs the injection of dependencies
func (p *Injector) Inject(w http.ResponseWriter, r *http.Request, sendData string) {
// Get the tracker from context and set flags
if tracker, ok := metrics.FromContext(r.Context()); ok {
tracker.SetFlag(metrics.KeyFetchedExternalURL, strconv.FormatBool(true))
}
params, err := p.unpackParams(sendData)
if err != nil {
fail.Request(w, r, err)

View File

@@ -134,13 +134,9 @@ func TestInject(t *testing.T) {
r := httptest.NewRequest("GET", "/target", nil)
sendData := base64.StdEncoding.EncodeToString([]byte(tokenJSON + originResourceServer.URL + urlJSON))
// add metrics tracker
r = testhelper.RequestWithMetrics(t, r)
injector.Inject(tc.responseWriter, r, sendData)
require.Equal(t, tc.handlerMustBeCalled, handlerIsCalled, "a partial file must not be saved")
testhelper.AssertMetrics(t, r)
}
}
@@ -174,7 +170,7 @@ func TestSuccessfullRequest(t *testing.T) {
injector := NewInjector()
injector.SetUploadHandler(uploadHandler)
response := makeRequest(t, injector, tokenJSON+originResourceServer.URL+urlJSON)
response := makeRequest(injector, tokenJSON+originResourceServer.URL+urlJSON)
require.Equal(t, "/target/upload", uploadHandler.request.URL.Path)
require.Equal(t, int64(6), uploadHandler.request.ContentLength)
@@ -295,7 +291,7 @@ func TestValidUploadConfiguration(t *testing.T) {
sendDataJSONString, err := json.Marshal(sendData)
require.NoError(t, err)
response := makeRequest(t, injector, string(sendDataJSONString))
response := makeRequest(injector, string(sendDataJSONString))
// check the response
require.Equal(t, 200, response.Code)
@@ -349,7 +345,7 @@ func TestInvalidUploadConfiguration(t *testing.T) {
sendDataJSONString, err := json.Marshal(tc.sendData)
require.NoError(t, err)
response := makeRequest(t, NewInjector(), string(sendDataJSONString))
response := makeRequest(NewInjector(), string(sendDataJSONString))
require.Equal(t, 500, response.Code)
require.Equal(t, "Internal Server Error\n", response.Body.String())
@@ -382,7 +378,7 @@ func TestTimeoutConfiguration(t *testing.T) {
sendDataJSONString, err := json.Marshal(sendData)
require.NoError(t, err)
response := makeRequest(t, injector, string(sendDataJSONString))
response := makeRequest(injector, string(sendDataJSONString))
responseResult := response.Result()
defer responseResult.Body.Close()
require.Equal(t, http.StatusGatewayTimeout, responseResult.StatusCode)
@@ -400,7 +396,7 @@ func TestSSRFFilter(t *testing.T) {
sendDataJSONString, err := json.Marshal(sendData)
require.NoError(t, err)
response := makeRequest(t, NewInjector(), string(sendDataJSONString))
response := makeRequest(NewInjector(), string(sendDataJSONString))
// Test uses loopback IP like 127.0.0.x and thus fails
require.Equal(t, http.StatusForbidden, response.Code)
@@ -429,7 +425,7 @@ func TestSSRFFilterWithAllowLocalhost(t *testing.T) {
injector := NewInjector()
injector.SetUploadHandler(uploadHandler)
response := makeRequest(t, injector, string(sendDataJSONString))
response := makeRequest(injector, string(sendDataJSONString))
require.Equal(t, http.StatusOK, response.Code)
}
@@ -475,7 +471,7 @@ func TestRestrictForwardedResponseHeaders(t *testing.T) {
},
})
response := makeRequest(t, injector, entryParamsJSON)
response := makeRequest(injector, entryParamsJSON)
require.Equal(t, "/target/upload", uploadHandler.request.URL.Path)
require.Equal(t, int64(6), uploadHandler.request.ContentLength)
@@ -500,14 +496,14 @@ func jsonEntryParams(t *testing.T, params *map[string]interface{}) string {
}
func TestIncorrectSendData(t *testing.T) {
response := makeRequest(t, NewInjector(), "")
response := makeRequest(NewInjector(), "")
require.Equal(t, 500, response.Code)
require.Equal(t, "Internal Server Error\n", response.Body.String())
}
func TestIncorrectSendDataUrl(t *testing.T) {
response := makeRequest(t, NewInjector(), `{"Token": "token", "Url": "url"}`)
response := makeRequest(NewInjector(), `{"Token": "token", "Url": "url"}`)
require.Equal(t, http.StatusBadGateway, response.Code)
require.Equal(t, "Bad Gateway\n", response.Body.String())
@@ -528,7 +524,7 @@ func TestFailedOriginServer(t *testing.T) {
injector := NewInjector()
injector.SetUploadHandler(uploadHandler)
response := makeRequest(t, injector, tokenJSON+originResourceServer.URL+urlJSON)
response := makeRequest(injector, tokenJSON+originResourceServer.URL+urlJSON)
require.Equal(t, 404, response.Code)
require.Equal(t, "Not found", response.Body.String())
@@ -579,7 +575,7 @@ func TestLongUploadRequest(t *testing.T) {
r := httptest.NewRequest("GET", uploadServer.URL+"/upload", nil).WithContext(ctx)
r.Header.Set("Overridden-Header", "request")
response := makeCustomRequest(t, injector, `{"Token": "token", "Url": "`+originResourceServer.URL+`/upstream"}`, r)
response := makeCustomRequest(injector, `{"Token": "token", "Url": "`+originResourceServer.URL+`/upstream"}`, r)
// wait for the slow upload to finish
require.Equal(t, http.StatusOK, response.Code)
@@ -605,7 +601,7 @@ func TestHttpClientReuse(t *testing.T) {
injector := NewInjector()
injector.SetUploadHandler(uploadHandler)
response := makeRequest(t, injector, tokenJSON+originResourceServer.URL+urlJSON)
response := makeRequest(injector, tokenJSON+originResourceServer.URL+urlJSON)
require.Equal(t, http.StatusOK, response.Code)
_, found := httpClients.Load(expectedKey)
require.True(t, found)
@@ -616,21 +612,17 @@
require.NotEqual(t, cachedClient(&entryParams{SSRFFilter: true}), storedClient)
}
func makeRequest(t *testing.T, injector *Injector, data string) *httptest.ResponseRecorder {
func makeRequest(injector *Injector, data string) *httptest.ResponseRecorder {
r := httptest.NewRequest("GET", "/target", nil)
r.Header.Set("Overridden-Header", "request")
return makeCustomRequest(t, injector, data, r)
return makeCustomRequest(injector, data, r)
}
func makeCustomRequest(t *testing.T, injector *Injector, data string, r *http.Request) *httptest.ResponseRecorder {
// add metrics tracker
r = testhelper.RequestWithMetrics(t, r)
func makeCustomRequest(injector *Injector, data string, r *http.Request) *httptest.ResponseRecorder {
w := httptest.NewRecorder()
sendData := base64.StdEncoding.EncodeToString([]byte(data))
injector.Inject(w, r, sendData)
testhelper.AssertMetrics(t, r)
return w
}

View File

@@ -7,7 +7,6 @@ import (
"net/url"
"os"
"path/filepath"
"strings"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/log"
)
@@ -57,8 +56,3 @@ func IsContentType(expected, actual string) bool {
parsed, _, err := mime.ParseMediaType(actual)
return err == nil && parsed == expected
}
// IsURL checks if the given string starts with http:// or https://
func IsURL(path string) bool {
return strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://")
}

View File

@@ -17,9 +17,6 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
"gitlab.com/gitlab-org/labkit/tracing"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
@ -166,13 +163,6 @@ func (r *Resizer) Inject(w http.ResponseWriter, req *http.Request, paramsData st
return
}
if helper.IsURL(params.Location) {
// Get the tracker from context and set flags
if tracker, ok := metrics.FromContext(req.Context()); ok {
tracker.SetFlag(metrics.KeyFetchedExternalURL, strconv.FormatBool(true))
}
}
imageFile, err := openSourceImage(params.Location)
if err != nil {
// This means we cannot even read the input image; fail fast.
@ -329,8 +319,12 @@ func startResizeImageCommand(ctx context.Context, imageReader io.Reader, params
return cmd, stdout, nil
}
func isURL(location string) bool {
return strings.HasPrefix(location, "http://") || strings.HasPrefix(location, "https://")
}
func openSourceImage(location string) (*imageFile, error) {
if helper.IsURL(location) {
if isURL(location) {
return openFromURL(location)
}
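
Note the prefix check is case-sensitive, so an uppercase scheme falls through to the filesystem branch. If case-insensitive matching were ever wanted, net/url would handle it, since url.Parse lowercases the scheme; a hypothetical variant, not what this commit does:

package main

import (
	"fmt"
	"net/url"
)

func isHTTPURL(location string) bool {
	u, err := url.Parse(location)
	return err == nil && (u.Scheme == "http" || u.Scheme == "https")
}

func main() {
	fmt.Println(isHTTPURL("HTTP://example.com/a.png")) // true
	fmt.Println(isHTTPURL("/var/opt/gitlab/a.png"))    // false
}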

View File

@ -1,7 +1,6 @@
package imageresizer
import (
"bytes"
"encoding/base64"
"encoding/json"
"image"
@ -10,7 +9,6 @@ import (
"net/http"
"net/http/httptest"
"os"
"strconv"
"testing"
"time"
@ -232,77 +230,6 @@ func TestServeOriginalImageWhenSourceImageIsTooSmall(t *testing.T) {
require.Equal(t, content, responseData, "expected original image")
}
func TestResizeImageFromHTTPServer(t *testing.T) {
// Create a test server that serves a test image
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
// Open a test image file
file, err := os.Open("../../testdata/image.png")
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
defer file.Close()
// Get file info for content length and modification time
fileInfo, err := file.Stat()
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
// Set appropriate headers
w.Header().Set("Content-Type", "image/png")
w.Header().Set("Content-Length", strconv.FormatInt(fileInfo.Size(), 10))
w.Header().Set("Last-Modified", fileInfo.ModTime().UTC().Format(http.TimeFormat))
// Copy the file to the response
_, _ = io.Copy(w, file)
}))
defer ts.Close()
// Create resize parameters with the test server URL
params := resizeParams{
Location: ts.URL,
ContentType: "image/png",
Width: 100,
}
// Create a resizer with test configuration
resizer := NewResizer(config.Config{
ImageResizerConfig: config.ImageResizerConfig{
MaxScalerProcs: 1,
MaxFilesize: 10 * 1024 * 1024, // 10MB
},
})
paramsJSON := encodeParams(t, &params)
// Create a test request with metrics
req := httptest.NewRequest("GET", "/image", nil)
req = testhelper.RequestWithMetrics(t, req)
w := httptest.NewRecorder()
// Call the Inject method
resizer.Inject(w, req, paramsJSON)
// Check the response
resp := w.Result()
defer resp.Body.Close()
require.Equal(t, http.StatusOK, resp.StatusCode, "Expected status OK")
// Verify the response contains image data
body, err := io.ReadAll(resp.Body)
require.NoError(t, err, "Failed to read response body")
require.NotEmpty(t, body, "Response body is empty")
// Check if the response is a valid image
// For PNG, we can check for the PNG signature
require.True(t, bytes.HasPrefix(body, []byte(pngMagic)), "Response is not a valid PNG image")
// Assert metrics were recorded properly
testhelper.AssertMetrics(t, req)
}
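
The deleted fixture server sets Content-Type, Content-Length, and Last-Modified by hand. Should a similar fixture be needed again, http.ServeContent derives all three headers itself; a sketch under that assumption (the helper name is hypothetical, the testdata path is taken from the removed test):

package imageresizer

import (
	"net/http"
	"net/http/httptest"
	"os"
	"testing"
)

func newImageFixtureServer(t *testing.T) *httptest.Server {
	t.Helper()
	return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		f, err := os.Open("../../testdata/image.png")
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		defer f.Close()
		fi, err := f.Stat()
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		// ServeContent fills in Content-Type, Content-Length, and
		// Last-Modified from the name, size, and modtime.
		http.ServeContent(w, r, fi.Name(), fi.ModTime(), f)
	}))
}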
// The Rails application sends a Base64-encoded JSON string carrying
// these parameters in an HTTP response header
func encodeParams(t *testing.T, p *resizeParams) string {

View File

@ -1,61 +0,0 @@
// Package metrics is used to track various metrics and flags for incoming requests in GitLab Workhorse.
// This package provides utilities to manage request metadata, such as setting and retrieving arbitrary flags.
package metrics
import (
"context"
)
const (
// KeyFetchedExternalURL is a flag key used to track whether the request fetched an external URL.
KeyFetchedExternalURL = "fetched_external_url"
)
// RequestTracker is a simple container for request metadata and flags
type RequestTracker struct {
// Flags stores arbitrary string values for the request
Flags map[string]string
}
// NewRequestTracker creates a new RequestTracker
func NewRequestTracker() *RequestTracker {
return &RequestTracker{
Flags: make(map[string]string),
}
}
// SetFlag sets a flag value
func (rt *RequestTracker) SetFlag(key, value string) {
rt.Flags[key] = value
}
// GetFlag gets a flag value
func (rt *RequestTracker) GetFlag(key string) (string, bool) {
val, ok := rt.Flags[key]
return val, ok
}
// HasFlag returns true if the flag exists and equals the given value
func (rt *RequestTracker) HasFlag(key, value string) bool {
if val, ok := rt.Flags[key]; ok {
return val == value
}
return false
}
// Context key for storing the request tracker
type contextKey string
// TrackerKey is the key used to store the RequestTracker in context
const TrackerKey contextKey = "requestTracker"
// FromContext retrieves the RequestTracker from context
func FromContext(ctx context.Context) (*RequestTracker, bool) {
rt, ok := ctx.Value(TrackerKey).(*RequestTracker)
return rt, ok
}
// NewContext creates a new context with the RequestTracker
func NewContext(ctx context.Context, rt *RequestTracker) context.Context {
return context.WithValue(ctx, TrackerKey, rt)
}
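
For readers unfamiliar with the pattern being deleted: the tracker rode along on the request context, so any layer (send_url, the image resizer) could flip flags that instrumentation later read. A self-contained sketch of that round trip, with the types redeclared inline so it compiles on its own:

package main

import (
	"context"
	"fmt"
)

type RequestTracker struct{ Flags map[string]string }

type contextKey string

const TrackerKey contextKey = "requestTracker"

func main() {
	rt := &RequestTracker{Flags: map[string]string{}}
	ctx := context.WithValue(context.Background(), TrackerKey, rt)

	// Downstream code retrieves the same tracker and flips a flag:
	if got, ok := ctx.Value(TrackerKey).(*RequestTracker); ok {
		got.Flags["fetched_external_url"] = "true"
	}
	fmt.Println(rt.Flags) // map[fetched_external_url:true]
}

The typed contextKey is the detail that matters here: a plain string key could collide with context values set by other packages.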

View File

@ -1,87 +0,0 @@
package metrics
import (
"context"
"testing"
"github.com/stretchr/testify/require"
)
func TestSetAndGetFlag(t *testing.T) {
rt := NewRequestTracker()
// Test setting and getting a flag
rt.SetFlag("test_key", "test_value")
val, ok := rt.GetFlag("test_key")
require.True(t, ok)
require.Equal(t, "test_value", val)
// Test getting a non-existent flag
val, ok = rt.GetFlag("non_existent")
require.False(t, ok)
require.Empty(t, val)
// Test overwriting a flag
rt.SetFlag("test_key", "new_value")
val, ok = rt.GetFlag("test_key")
require.True(t, ok)
require.Equal(t, "new_value", val)
}
func TestHasFlag(t *testing.T) {
rt := NewRequestTracker()
// Set a flag
rt.SetFlag("test_key", "test_value")
// Test HasFlag with correct value
require.True(t, rt.HasFlag("test_key", "test_value"))
// Test HasFlag with incorrect value
require.False(t, rt.HasFlag("test_key", "wrong_value"))
// Test HasFlag with non-existent key
require.False(t, rt.HasFlag("non_existent", "any_value"))
}
func TestContextOperations(t *testing.T) {
// Create a base context
ctx := context.Background()
// Create a request tracker
rt := NewRequestTracker()
rt.SetFlag("test_key", "test_value")
// Test NewContext
ctxWithTracker := NewContext(ctx, rt)
require.NotEqual(t, ctx, ctxWithTracker)
// Test FromContext - successful retrieval
retrievedRT, ok := FromContext(ctxWithTracker)
require.True(t, ok)
// Verify the retrieved tracker has the expected flag
val, ok := retrievedRT.GetFlag("test_key")
require.True(t, ok)
require.Equal(t, "test_value", val)
// Test FromContext - no tracker in context
emptyRT, ok := FromContext(ctx)
require.False(t, ok)
require.Nil(t, emptyRT)
}
func TestPredefinedConstants(t *testing.T) {
// Test using the predefined constant
rt := NewRequestTracker()
rt.SetFlag(KeyFetchedExternalURL, "true")
val, ok := rt.GetFlag(KeyFetchedExternalURL)
require.True(t, ok)
require.Equal(t, "true", val)
require.True(t, rt.HasFlag(KeyFetchedExternalURL, "true"))
}

View File

@ -7,7 +7,6 @@ import (
"io"
"net/http"
"os"
"strconv"
"strings"
"sync"
"time"
@ -15,8 +14,6 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
"gitlab.com/gitlab-org/labkit/mask"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
@ -120,11 +117,6 @@ func (e *entry) Inject(w http.ResponseWriter, r *http.Request, sendData string)
return
}
// Get the tracker from context and set flags
if tracker, ok := metrics.FromContext(r.Context()); ok {
tracker.SetFlag(metrics.KeyFetchedExternalURL, strconv.FormatBool(true))
}
setDefaultMethod(&params)
log.WithContextFields(r.Context(), log.Fields{

View File

@ -52,12 +52,7 @@ func testEntryServer(t *testing.T, requestURL string, httpHeaders http.Header, a
w.Header().Set("Date", "Wed, 21 Oct 2015 05:28:00 GMT")
w.Header().Set("Pragma", "no-cache")
// add metrics tracker
r = testhelper.RequestWithMetrics(t, r)
SendURL.Inject(w, r, data)
testhelper.AssertMetrics(t, r)
}
serveFile := func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "GET", r.Method)
@ -244,11 +239,7 @@ func TestPostRequest(t *testing.T) {
data := base64.URLEncoding.EncodeToString(jsonParams)
// add metrics tracker
r = testhelper.RequestWithMetrics(t, r)
SendURL.Inject(w, r, data)
testhelper.AssertMetrics(t, r)
}
externalPostURLHandler := func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "POST", r.Method)
@ -309,13 +300,9 @@ func TestErrorWithCustomStatusCode(t *testing.T) {
response := httptest.NewRecorder()
request := httptest.NewRequest("GET", "/target", nil)
// add metrics tracker
request = testhelper.RequestWithMetrics(t, request)
SendURL.Inject(response, request, data)
require.Equal(t, http.StatusTeapot, response.Code)
testhelper.AssertMetrics(t, request)
}
func TestHttpClientReuse(t *testing.T) {

View File

@ -19,8 +19,6 @@ import (
"github.com/golang-jwt/jwt/v5"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
"gitlab.com/gitlab-org/labkit/log"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
@ -274,26 +272,3 @@ func WriteExecutable(tb testing.TB, path string, content []byte) string {
return path
}
// RequestWithMetrics wraps the given request with metrics tracking context.
func RequestWithMetrics(t *testing.T, r *http.Request) *http.Request {
t.Helper()
// add metrics tracker
tracker := metrics.NewRequestTracker()
ctx := metrics.NewContext(r.Context(), tracker)
return r.WithContext(ctx)
}
// AssertMetrics checks if the request has the expected metrics tracking and flags.
func AssertMetrics(t *testing.T, r *http.Request) {
t.Helper()
// check metrics
tracker, ok := metrics.FromContext(r.Context())
require.True(t, ok)
val, ok := tracker.GetFlag(metrics.KeyFetchedExternalURL)
require.True(t, ok)
require.Equal(t, "true", val)
}
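
One detail worth keeping from the deleted helpers is the t.Helper() call: it marks the function as a test helper so a require failure is reported at the caller's line rather than inside the helper. A minimal illustration with an invented helper name:

package testhelper_test

import (
	"net/http"
	"testing"

	"github.com/stretchr/testify/require"
)

// assertOK fails at the caller's line thanks to t.Helper().
func assertOK(t *testing.T, code int) {
	t.Helper()
	require.Equal(t, http.StatusOK, code)
}

func TestAssertOK(t *testing.T) {
	assertOK(t, http.StatusOK)
}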

View File

@ -1,7 +1,6 @@
package upstream
import (
"context"
"net/http"
"github.com/prometheus/client_golang/prometheus"
@ -9,8 +8,6 @@ import (
"github.com/prometheus/client_golang/prometheus/promhttp"
"gitlab.com/gitlab-org/labkit/metrics"
wm "gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
)
const (
@ -31,7 +28,7 @@ var (
buildHandler = metrics.NewHandlerFactory(
metrics.WithNamespace(namespace),
metrics.WithLabels("route", "route_id", "backend_id", wm.KeyFetchedExternalURL))
metrics.WithLabels("route", "route_id", "backend_id"))
)
func instrumentRoute(next http.Handler, _ string, metadata routeMetadata) http.Handler {
@ -39,20 +36,7 @@ func instrumentRoute(next http.Handler, _ string, metadata routeMetadata) http.H
map[string]string{
"route": metadata.regexpStr,
"route_id": metadata.routeID,
"backend_id": string(metadata.backendID)}),
metrics.WithLabelFromContext(
wm.KeyFetchedExternalURL,
func(ctx context.Context) string {
if tracker, ok := wm.FromContext(ctx); ok {
val, ok := tracker.GetFlag(wm.KeyFetchedExternalURL)
if ok {
return val
}
}
return "false"
},
),
)
"backend_id": string(metadata.backendID)}))
}
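
With WithLabelFromContext gone, fetched_external_url no longer appears as a label, which shrinks metric cardinality: every distinct label value creates its own time series. A generic illustration with the Prometheus Go client; the metric and label names here are illustrative only:

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	reg := prometheus.NewRegistry()
	requests := prometheus.NewCounterVec(
		prometheus.CounterOpts{Name: "requests_total", Help: "demo counter"},
		[]string{"route", "fetched_external_url"},
	)
	reg.MustRegister(requests)

	// The boolean label doubles the series for every route it appears on.
	requests.WithLabelValues("/target", "true").Inc()
	requests.WithLabelValues("/target", "false").Inc()

	families, _ := reg.Gather()
	fmt.Println(len(families[0].Metric)) // 2: one series per label combination
}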
func instrumentGeoProxyRoute(next http.Handler, _ string, metadata routeMetadata) http.Handler {

View File

@ -21,7 +21,6 @@ import (
gobpkg "gitlab.com/gitlab-org/gitlab/workhorse/internal/gob"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/imageresizer"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/metrics"
proxypkg "gitlab.com/gitlab-org/gitlab/workhorse/internal/proxy"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/queueing"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
@ -120,32 +119,26 @@ func withAllowOrigins(pattern string) func(*routeOptions) {
}
func (u *upstream) observabilityMiddlewares(handler http.Handler, method string, metadata routeMetadata, opts *routeOptions) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
tracker := metrics.NewRequestTracker()
ctx := metrics.NewContext(r.Context(), tracker)
r = r.WithContext(ctx)
handler = log.AccessLogger(
handler,
log.WithAccessLogger(u.accessLogger),
log.WithTrustedProxies(u.TrustedCIDRsForXForwardedFor),
log.WithExtraFields(func(_ *http.Request) log.Fields {
return log.Fields{
"route": metadata.regexpStr, // This field matches the `route` label in Prometheus metrics
"route_id": metadata.routeID,
"backend_id": metadata.backendID,
}
}),
)
handler = log.AccessLogger(
handler,
log.WithAccessLogger(u.accessLogger),
log.WithTrustedProxies(u.TrustedCIDRsForXForwardedFor),
log.WithExtraFields(func(_ *http.Request) log.Fields {
return log.Fields{
"route": metadata.regexpStr, // This field matches the `route` label in Prometheus metrics
"route_id": metadata.routeID,
"backend_id": metadata.backendID,
}
}),
)
handler = instrumentRoute(handler, method, metadata) // Add prometheus metrics
handler = instrumentRoute(handler, method, metadata) // Add prometheus metrics
if opts != nil && opts.isGeoProxyRoute {
handler = instrumentGeoProxyRoute(handler, method, metadata) // Add Geo prometheus metrics
}
if opts != nil && opts.isGeoProxyRoute {
handler = instrumentGeoProxyRoute(handler, method, metadata) // Add Geo prometheus metrics
}
handler.ServeHTTP(w, r)
})
return handler
}
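
The refactored observabilityMiddlewares composes the wrappers once and returns the result instead of rebuilding the chain inside a per-request closure. Wrapping order still determines execution order: the last wrapper applied sees the request first. A standalone sketch with illustrative names:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func wrap(name string, next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Println("enter", name)
		next.ServeHTTP(w, r)
	})
}

func main() {
	var h http.Handler = http.HandlerFunc(func(http.ResponseWriter, *http.Request) {})
	h = wrap("accesslog", h)  // applied first, runs closest to the app
	h = wrap("instrument", h) // applied last, sees the request first
	h.ServeHTTP(httptest.NewRecorder(), httptest.NewRequest("GET", "/", nil))
	// prints "enter instrument" then "enter accesslog"
}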
func (u *upstream) route(method string, metadata routeMetadata, handler http.Handler, opts ...func(*routeOptions)) routeEntry {

View File

@ -8,8 +8,8 @@ import (
"net/http"
"os"
"path/filepath"
"strings"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/httprs"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/transport"
@ -56,13 +56,17 @@ func OpenArchiveWithReaderFunc(ctx context.Context, location string, readerFunc
}
func openArchiveLocation(ctx context.Context, location string) (*archive, error) {
if helper.IsURL(location) {
if isURL(location) {
return openHTTPArchive(ctx, location)
}
return openFileArchive(ctx, location)
}
func isURL(path string) bool {
return strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://")
}
func openHTTPArchive(ctx context.Context, archivePath string) (*archive, error) {
scrubbedArchivePath := mask.URL(archivePath)
req, err := http.NewRequest(http.MethodGet, archivePath, nil)