Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-11-26 00:26:35 +00:00
parent 171554398f
commit aa54e0c45f
97 changed files with 1075 additions and 564 deletions

View File

@ -10,7 +10,6 @@
- unknown_failure
exit_codes:
- 111 # low free disk space https://gitlab.com/gitlab-org/gitlab/-/issues/498142
- 112 # Known flaky tests
.default-utils-before_script:
before_script:

View File

@ -18,21 +18,6 @@ Layout/LineLength:
- 'app/controllers/concerns/membership_actions.rb'
- 'app/controllers/concerns/notes_actions.rb'
- 'app/controllers/groups/milestones_controller.rb'
- 'app/controllers/groups_controller.rb'
- 'app/controllers/import/base_controller.rb'
- 'app/controllers/import/bitbucket_controller.rb'
- 'app/controllers/import/bitbucket_server_controller.rb'
- 'app/controllers/import/bulk_imports_controller.rb'
- 'app/controllers/import/fogbugz_controller.rb'
- 'app/controllers/import/gitea_controller.rb'
- 'app/controllers/import/gitlab_groups_controller.rb'
- 'app/controllers/import/gitlab_projects_controller.rb'
- 'app/controllers/invites_controller.rb'
- 'app/controllers/jira_connect/events_controller.rb'
- 'app/controllers/jira_connect/subscriptions_controller.rb'
- 'app/controllers/jwt_controller.rb'
- 'app/controllers/omniauth_callbacks_controller.rb'
- 'app/controllers/profiles/chat_names_controller.rb'
- 'app/controllers/projects/issues_controller.rb'
- 'app/controllers/projects/jobs_controller.rb'
- 'app/controllers/projects/labels_controller.rb'

View File

@ -1 +1 @@
b1891e7ed098a0628dda95037f63d7f145b65adc
8772e7bbd79d8fd2ba9137eef510b22fa2f8382f

View File

@ -278,7 +278,7 @@ gem 'rack', '~> 2.2.9' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'rack-timeout', '~> 0.7.0', require: 'rack/timeout/base' # rubocop:todo Gemfile/MissingFeatureCategory
group :puma do
gem 'puma', '= 6.4.3', require: false, feature_category: :shared
gem 'puma', '= 6.5.0', require: false, feature_category: :shared
gem 'sd_notify', '~> 0.1.0', require: false # rubocop:todo Gemfile/MissingFeatureCategory
end

View File

@ -526,8 +526,8 @@
{"name":"pry-rails","version":"0.3.9","platform":"ruby","checksum":"468662575abb6b67f4a9831219f99290d5eae7bf186e64dd810d0a3e4a8cc4b1"},
{"name":"pry-shell","version":"0.6.4","platform":"ruby","checksum":"ad024882d29912b071a7de65ebea538b242d2dc1498c60c7c2352ef94769f208"},
{"name":"public_suffix","version":"6.0.1","platform":"ruby","checksum":"61d44e1cab5cbbbe5b31068481cf16976dd0dc1b6b07bd95617ef8c5e3e00c6f"},
{"name":"puma","version":"6.4.3","platform":"java","checksum":"373fcfacacaafd0f5a24db18cb99b3f2decb5c5316470169852559aa80adc8ab"},
{"name":"puma","version":"6.4.3","platform":"ruby","checksum":"24a4645c006811d83f2480057d1f54a96e7627b6b90e1c99b260b9dc630eb43e"},
{"name":"puma","version":"6.5.0","platform":"java","checksum":"a58eea585d291aa33796add9884208bc1591da5d8e61886f8ac74d080b298c40"},
{"name":"puma","version":"6.5.0","platform":"ruby","checksum":"94d1b75cab7f356d52e4f1b17b9040a090889b341dbeee6ee3703f441dc189f2"},
{"name":"pyu-ruby-sasl","version":"0.0.3.3","platform":"ruby","checksum":"5683a6bc5738db5a1bf5ceddeaf545405fb241b4184dd4f2587e679a7e9497e5"},
{"name":"raabro","version":"1.4.0","platform":"ruby","checksum":"d4fa9ff5172391edb92b242eed8be802d1934b1464061ae5e70d80962c5da882"},
{"name":"racc","version":"1.8.1","platform":"java","checksum":"54f2e6d1e1b91c154013277d986f52a90e5ececbe91465d29172e49342732b98"},

View File

@ -1458,7 +1458,7 @@ GEM
tty-markdown
tty-prompt
public_suffix (6.0.1)
puma (6.4.3)
puma (6.5.0)
nio4r (~> 2.0)
pyu-ruby-sasl (0.0.3.3)
raabro (1.4.0)
@ -2235,7 +2235,7 @@ DEPENDENCIES
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.6.4)
puma (= 6.4.3)
puma (= 6.5.0)
rack (~> 2.2.9)
rack-attack (~> 6.7.0)
rack-cors (~> 2.0.1)

View File

@ -533,8 +533,8 @@
{"name":"psych","version":"5.2.0","platform":"java","checksum":"da3a7995e652365faa210d7658a291141c9a15bf05a4d9a48a13856b04f36960"},
{"name":"psych","version":"5.2.0","platform":"ruby","checksum":"6603fe756bcaf14daa25bc17625f36c90931dcf70452ac1e8da19760dc310573"},
{"name":"public_suffix","version":"6.0.1","platform":"ruby","checksum":"61d44e1cab5cbbbe5b31068481cf16976dd0dc1b6b07bd95617ef8c5e3e00c6f"},
{"name":"puma","version":"6.4.3","platform":"java","checksum":"373fcfacacaafd0f5a24db18cb99b3f2decb5c5316470169852559aa80adc8ab"},
{"name":"puma","version":"6.4.3","platform":"ruby","checksum":"24a4645c006811d83f2480057d1f54a96e7627b6b90e1c99b260b9dc630eb43e"},
{"name":"puma","version":"6.5.0","platform":"java","checksum":"a58eea585d291aa33796add9884208bc1591da5d8e61886f8ac74d080b298c40"},
{"name":"puma","version":"6.5.0","platform":"ruby","checksum":"94d1b75cab7f356d52e4f1b17b9040a090889b341dbeee6ee3703f441dc189f2"},
{"name":"pyu-ruby-sasl","version":"0.0.3.3","platform":"ruby","checksum":"5683a6bc5738db5a1bf5ceddeaf545405fb241b4184dd4f2587e679a7e9497e5"},
{"name":"raabro","version":"1.4.0","platform":"ruby","checksum":"d4fa9ff5172391edb92b242eed8be802d1934b1464061ae5e70d80962c5da882"},
{"name":"racc","version":"1.8.1","platform":"java","checksum":"54f2e6d1e1b91c154013277d986f52a90e5ececbe91465d29172e49342732b98"},

View File

@ -1475,7 +1475,7 @@ GEM
psych (5.2.0)
stringio
public_suffix (6.0.1)
puma (6.4.3)
puma (6.5.0)
nio4r (~> 2.0)
pyu-ruby-sasl (0.0.3.3)
raabro (1.4.0)
@ -2262,7 +2262,7 @@ DEPENDENCIES
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.6.4)
puma (= 6.4.3)
puma (= 6.5.0)
rack (~> 2.2.9)
rack-attack (~> 6.7.0)
rack-cors (~> 2.0.1)

View File

@ -17,7 +17,7 @@ export const COMMENT_FORM = {
bodyPlaceholderInternal: __('Write an internal note or drag your files here…'),
internal: s__('Notes|Make this an internal note'),
internalVisibility: s__(
'Notes|Internal notes are only visible to members with the role of Reporter or higher',
'Notes|Internal notes are only visible to members with the role of Planner or higher',
),
discussionThatNeedsResolution: __(
'Discuss a specific suggestion or question that needs to be resolved.',

View File

@ -94,9 +94,15 @@ class GroupsController < Groups::ApplicationController
successful_creation_hooks
notice = if @group.chat_team.present?
format(_("Group %{group_name} and its Mattermost team were successfully created."), group_name: @group.name)
format(
_("Group %{group_name} and its Mattermost team were successfully created."),
group_name: @group.name
)
else
format(_("Group %{group_name} was successfully created."), group_name: @group.name)
format(
_("Group %{group_name} was successfully created."),
group_name: @group.name
)
end
redirect_to @group, notice: notice
@ -207,7 +213,8 @@ class GroupsController < Groups::ApplicationController
)
if export_service.async_execute
redirect_to edit_group_path(@group), notice: _('Group export started. A download link will be sent by email and made available on this page.')
redirect_to edit_group_path(@group),
notice: _('Group export started. A download link will be sent by email and made available on this page.')
else
redirect_to edit_group_path(@group), alert: _('Group export could not be started.')
end
@ -220,7 +227,9 @@ class GroupsController < Groups::ApplicationController
send_upload(export_file, attachment: export_file.filename)
else
redirect_to edit_group_path(@group),
alert: _('The file containing the export is not available yet; it may still be transferring. Please try again later.')
alert: _(
'The file containing the export is not available yet; it may still be transferring. Please try again later.'
)
end
else
redirect_to edit_group_path(@group),

View File

@ -3,7 +3,13 @@
class Import::BaseController < ApplicationController
include ActionView::Helpers::SanitizeHelper
before_action -> { check_rate_limit!(:project_import, scope: [current_user, :project_import], redirect_back: true) }, only: [:create]
before_action -> {
check_rate_limit!(
:project_import,
scope: [current_user, :project_import],
redirect_back: true
)
}, only: [:create]
feature_category :importers
urgency :low
@ -65,11 +71,23 @@ class Import::BaseController < ApplicationController
end
def serialized_provider_repos
Import::ProviderRepoSerializer.new(current_user: current_user).represent(importable_repos, provider: provider_name, provider_url: provider_url, **extra_representation_opts)
Import::ProviderRepoSerializer.new(current_user: current_user)
.represent(
importable_repos,
provider: provider_name,
provider_url: provider_url,
**extra_representation_opts
)
end
def serialized_incompatible_repos
Import::ProviderRepoSerializer.new(current_user: current_user).represent(incompatible_repos, provider: provider_name, provider_url: provider_url, **extra_representation_opts)
Import::ProviderRepoSerializer.new(current_user: current_user)
.represent(
incompatible_repos,
provider: provider_name,
provider_url: provider_url,
**extra_representation_opts
)
end
def serialized_imported_projects
@ -92,7 +110,11 @@ class Import::BaseController < ApplicationController
return current_user.namespace if names == owner
group = Groups::NestedCreateService.new(current_user, organization_id: Current.organization_id, group_path: names).execute
group = Groups::NestedCreateService.new(
current_user,
organization_id: Current.organization_id,
group_path: names
).execute
group.errors.any? ? current_user.namespace : group
rescue StandardError => e

View File

@ -18,7 +18,10 @@ class Import::BitbucketController < Import::BaseController
if auth_state.blank? || !ActiveSupport::SecurityUtils.secure_compare(auth_state, params[:state])
go_to_bitbucket_for_permissions
else
response = oauth_client.auth_code.get_token(params[:code], redirect_uri: users_import_bitbucket_callback_url(namespace_id: params[:namespace_id]))
response = oauth_client.auth_code.get_token(
params[:code],
redirect_uri: users_import_bitbucket_callback_url(namespace_id: params[:namespace_id])
)
session[:bitbucket_token] = response.token
session[:bitbucket_expires_at] = response.expires_at
@ -62,7 +65,13 @@ class Import::BitbucketController < Import::BaseController
# Bitbucket::Connection class refreshes it.
session[:bitbucket_token] = bitbucket_client.connection.token
project = Gitlab::BitbucketImport::ProjectCreator.new(repo, project_name, target_namespace, current_user, credentials).execute
project = Gitlab::BitbucketImport::ProjectCreator.new(
repo,
project_name,
target_namespace,
current_user,
credentials
).execute
if project.persisted?
render json: ProjectSerializer.new.represent(project, serializer: :import)
@ -70,7 +79,8 @@ class Import::BitbucketController < Import::BaseController
render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end
else
render json: { errors: s_('BitbucketImport|You are not allowed to import projects in this namespace.') }, status: :unprocessable_entity
render json: { errors: s_('BitbucketImport|You are not allowed to import projects in this namespace.') },
status: :unprocessable_entity
end
end
@ -143,7 +153,10 @@ class Import::BitbucketController < Import::BaseController
def go_to_bitbucket_for_permissions
state = SecureRandom.base64(64)
session[:bitbucket_auth_state] = state
redirect_to oauth_client.auth_code.authorize_url(redirect_uri: users_import_bitbucket_callback_url(namespace_id: params[:namespace_id]), state: state)
redirect_to oauth_client.auth_code.authorize_url(
redirect_uri: users_import_bitbucket_callback_url(namespace_id: params[:namespace_id]),
state: state
)
end
def bitbucket_unauthorized(exception)

View File

@ -4,6 +4,7 @@ class Import::BitbucketServerController < Import::BaseController
extend ::Gitlab::Utils::Override
include ActionView::Helpers::SanitizeHelper
include SafeFormatHelper
before_action :verify_bitbucket_server_import_enabled
before_action :bitbucket_auth, except: [:new, :configure]
@ -15,7 +16,8 @@ class Import::BitbucketServerController < Import::BaseController
# As a basic sanity check to prevent URL injection, restrict project
# repository input and repository slugs to allowed characters. For Bitbucket:
#
# Project keys must start with a letter and may only consist of ASCII letters, numbers and underscores (A-Z, a-z, 0-9, _).
# Project keys must start with a letter and may only consist of ASCII letters,
# numbers and underscores (A-Z, a-z, 0-9, _).
#
# Repository names are limited to 128 characters. They must start with a
# letter or number and may contain spaces, hyphens, underscores, and periods.
@ -31,10 +33,19 @@ class Import::BitbucketServerController < Import::BaseController
repo = client.repo(@project_key, @repo_slug)
unless repo
return render json: { errors: _("Project %{project_repo} could not be found") % { project_repo: "#{@project_key}/#{@repo_slug}" } }, status: :unprocessable_entity
return render json: {
errors: safe_format(
s_("Project %{project_repo} could not be found"),
project_repo: "#{@project_key}/#{@repo_slug}"
)
}, status: :unprocessable_entity
end
result = Import::BitbucketServerService.new(client, current_user, params.merge({ organization_id: Current.organization_id })).execute(credentials)
result = Import::BitbucketServerService.new(
client,
current_user,
params.merge({ organization_id: Current.organization_id })
).execute(credentials)
if result[:status] == :success
render json: ProjectSerializer.new.represent(result[:project], serializer: :import)
@ -87,7 +98,11 @@ class Import::BitbucketServerController < Import::BaseController
end
def bitbucket_repos
@bitbucket_repos ||= client.repos(page_offset: page_offset, limit: limit_per_page, filter: sanitized_filter_param).to_a
@bitbucket_repos ||= client.repos(
page_offset: page_offset,
limit: limit_per_page,
filter: sanitized_filter_param
).to_a
end
def normalize_import_params

View File

@ -69,7 +69,9 @@ class Import::BulkImportsController < ApplicationController
::BulkImports::CreateService.new(current_user, entry, credentials).execute
end
render json: responses.map { |response| { success: response.success?, id: response.payload[:id], message: response.message } }
render json: responses.map { |response|
{ success: response.success?, id: response.payload[:id], message: response.message }
}
end
def realtime_changes
@ -177,7 +179,8 @@ class Import::BulkImportsController < ApplicationController
rescue Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError => e
clear_session_data
redirect_to new_group_path(anchor: 'import-group-pane'), alert: _('Specified URL cannot be used: "%{reason}"') % { reason: e.message }
redirect_to new_group_path(anchor: 'import-group-pane'),
alert: _('Specified URL cannot be used: "%{reason}"') % { reason: e.message }
end
def allow_local_requests?

View File

@ -18,7 +18,8 @@ class Import::FogbugzController < Import::BaseController
res = Gitlab::FogbugzImport::Client.new(import_params.to_h.symbolize_keys)
rescue StandardError
# If the URI is invalid various errors can occur
return redirect_to new_import_fogbugz_path(namespace_id: params[:namespace_id]), alert: _('Could not connect to FogBugz, check your URL')
return redirect_to new_import_fogbugz_path(namespace_id: params[:namespace_id]),
alert: _('Could not connect to FogBugz, check your URL')
end
session[:fogbugz_token] = res.get_token.to_s
session[:fogbugz_uri] = params[:uri]

View File

@ -86,7 +86,11 @@ class Import::GiteaController < Import::GithubController
def client_options
verified_url, provider_hostname = verify_blocked_uri
{ host: verified_url.scheme == 'https' ? provider_url : verified_url.to_s, api_version: 'v1', hostname: provider_hostname }
{
host: verified_url.scheme == 'https' ? provider_url : verified_url.to_s,
api_version: 'v1',
hostname: provider_hostname
}
end
def verify_blocked_uri

View File

@ -10,7 +10,8 @@ class Import::GitlabGroupsController < ApplicationController
def create
unless file_is_valid?(group_params[:file])
return redirect_to new_group_path(anchor: 'import-group-pane'), alert: s_('GroupImport|Unable to process group import file')
return redirect_to new_group_path(anchor: 'import-group-pane'),
alert: s_('GroupImport|Unable to process group import file')
end
group_data = group_params
@ -36,7 +37,9 @@ class Import::GitlabGroupsController < ApplicationController
end
else
redirect_to new_group_path(anchor: 'import-group-pane'),
alert: s_("GroupImport|Group could not be imported: %{errors}") % { errors: group.errors.full_messages.to_sentence }
alert: s_("GroupImport|Group could not be imported: %{errors}") % {
errors: group.errors.full_messages.to_sentence
}
end
end

View File

@ -15,7 +15,11 @@ class Import::GitlabProjectsController < Import::BaseController
def create
unless file_is_valid?(project_params[:file])
return redirect_back_or_default(options: { alert: _("You need to upload a GitLab project export archive (ending in .gz).") })
return redirect_back_or_default(
options: {
alert: _("You need to upload a GitLab project export archive (ending in .gz).")
}
)
end
@project = ::Projects::GitlabProjectsImportService.new(current_user, project_params).execute
@ -26,7 +30,11 @@ class Import::GitlabProjectsController < Import::BaseController
notice: _("Project '%{project_name}' is being imported.") % { project_name: @project.name }
)
else
redirect_back_or_default(options: { alert: "Project could not be imported: #{@project.errors.full_messages.join(', ')}" })
redirect_back_or_default(
options: {
alert: "Project could not be imported: #{@project.errors.full_messages.join(', ')}"
}
)
end
end

View File

@ -84,7 +84,8 @@ class InvitesController < ApplicationController
if user_sign_up?
set_session_invite_params
redirect_to new_user_registration_path(invite_email: member.invite_email), notice: _("To accept this invitation, create an account or sign in.")
redirect_to new_user_registration_path(invite_email: member.invite_email),
notice: _("To accept this invitation, create an account or sign in.")
else
redirect_to new_user_session_path(sign_in_redirect_params), notice: sign_in_notice
end

View File

@ -17,7 +17,11 @@ class JiraConnect::EventsController < JiraConnect::ApplicationController
end
def uninstalled
if JiraConnectInstallations::DestroyService.execute(current_jira_installation, jira_connect_base_path, jira_connect_events_uninstalled_path)
if JiraConnectInstallations::DestroyService.execute(
current_jira_installation,
jira_connect_base_path,
jira_connect_events_uninstalled_path
)
head :ok
else
head :unprocessable_entity
@ -66,7 +70,11 @@ class JiraConnect::EventsController < JiraConnect::ApplicationController
end
def calculate_audiences
audiences = [Gitlab.config.jira_connect.enforce_jira_base_url_https ? jira_connect_base_url(protocol: 'https') : jira_connect_base_url]
audiences = if Gitlab.config.jira_connect.enforce_jira_base_url_https
[jira_connect_base_url(protocol: 'https')]
else
[jira_connect_base_url]
end
if (additional_url = Gitlab::CurrentSettings.jira_connect_additional_audience_url).present?
audiences << Gitlab::Utils.append_path(additional_url, "-/jira_connect")

View File

@ -65,7 +65,12 @@ class JiraConnect::SubscriptionsController < JiraConnect::ApplicationController
end
def create_service
JiraConnectSubscriptions::CreateService.new(current_jira_installation, current_user, namespace_path: params['namespace_path'], jira_user: jira_user)
JiraConnectSubscriptions::CreateService.new(
current_jira_installation,
current_user,
namespace_path: params['namespace_path'],
jira_user: jira_user
)
end
def destroy_service

View File

@ -34,7 +34,12 @@ class JwtController < ApplicationController
private
def authenticate_project_or_user
@authentication_result = Gitlab::Auth::Result.new(nil, nil, :none, Gitlab::Auth.read_only_authentication_abilities)
@authentication_result = Gitlab::Auth::Result.new(
nil,
nil,
:none,
Gitlab::Auth.read_only_authentication_abilities
)
authenticate_with_http_basic do |login, password|
@authentication_result = Gitlab::Auth.find_for_git_client(login, password, project: nil, request: request)
@ -70,10 +75,15 @@ class JwtController < ApplicationController
)
render(
json: { errors: [{
code: 'UNAUTHORIZED',
message: format(_("HTTP Basic: Access denied. If a password was provided for Git authentication, the password was incorrect or you're required to use a token instead of a password. If a token was provided, it was either incorrect, expired, or improperly scoped. See %{help_page_url}"), help_page_url: help_page)
}] },
json: {
errors: [{
code: 'UNAUTHORIZED',
message: format(_("HTTP Basic: Access denied. If a password was provided for Git authentication, the " \
"password was incorrect or you're required to use a token instead of a password. If a " \
"token was provided, it was either incorrect, expired, or improperly scoped. See " \
"%{help_page_url}"), help_page_url: help_page)
}]
},
status: :unauthorized
)
end

View File

@ -182,7 +182,8 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
end
def redirect_identity_link_failed(error_message)
redirect_to profile_account_path, notice: _("Authentication failed: %{error_message}") % { error_message: error_message }
redirect_to profile_account_path,
notice: _("Authentication failed: %{error_message}") % { error_message: error_message }
end
def redirect_identity_linked
@ -234,7 +235,8 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
else
if @user.deactivated?
@user.activate
flash[:notice] = _('Welcome back! Your account had been deactivated due to inactivity but is now reactivated.')
flash[:notice] =
_('Welcome back! Your account had been deactivated due to inactivity but is now reactivated.')
end
# session variable for storing bypass two-factor request from IDP
@ -262,12 +264,28 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
redirect_path = new_user_session_path
label = Gitlab::Auth::OAuth::Provider.label_for(oauth['provider'])
simple_url = Settings.gitlab.url.sub(%r{^https?://(www\.)?}i, '')
message = [_("Signing in using your %{label} account without a pre-existing account in %{simple_url} is not allowed.") % { label: label, simple_url: simple_url }]
message = [
_('Signing in using your %{label} account without a pre-existing ' \
'account in %{simple_url} is not allowed.') % {
label: label, simple_url: simple_url
}
]
if Gitlab::CurrentSettings.allow_signup?
redirect_path = new_user_registration_path
doc_pair = tag_pair(view_context.link_to('', help_page_path('user/profile/index.md', anchor: 'sign-in-services')), :doc_start, :doc_end)
message << safe_format(_("Create an account in %{simple_url} first, and then %{doc_start}connect it to your %{label} account%{doc_end}."), doc_pair, label: label, simple_url: simple_url)
doc_pair = tag_pair(view_context.link_to(
'',
help_page_path('user/profile/index.md', anchor: 'sign-in-services')),
:doc_start,
:doc_end
)
message << safe_format(
_('Create an account in %{simple_url} first, and then %{doc_start}connect it to ' \
'your %{label} account%{doc_end}.'),
doc_pair,
label: label,
simple_url: simple_url
)
end
flash[:alert] = message.join(' ').html_safe # rubocop:disable Rails/OutputSafety -- Generated message is safe
@ -308,7 +326,13 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
def handle_identity_with_untrusted_extern_uid
label = Gitlab::Auth::OAuth::Provider.label_for(oauth['provider'])
flash[:alert] = format(_("Signing in using your %{label} account has been disabled for security reasons. Please sign in to your GitLab account using another authentication method and reconnect to your %{label} account."), label: label)
flash[:alert] = format(
_('Signing in using your %{label} account has been disabled for security reasons. ' \
'Please sign in to your GitLab account using another authentication method and ' \
'reconnect to your %{label} account.'
),
label: label
)
redirect_to new_user_session_path
end

View File

@ -30,7 +30,8 @@ class Profiles::ChatNamesController < Profiles::ApplicationController
def deny
delete_chat_name_token
flash[:notice] = _("Denied authorization of chat nickname %{user_name}.") % { user_name: chat_name_params[:user_name] }
flash[:notice] =
_("Denied authorization of chat nickname %{user_name}.") % { user_name: chat_name_params[:user_name] }
redirect_to profile_chat_names_path
end

View File

@ -66,7 +66,7 @@ module Types
def job_token_policies
return unless Feature.enabled?(:add_policies_to_ci_job_token, object.source_project)
object.job_token_policies
object.job_token_policies&.map(&:to_sym)
end
end
# rubocop: enable Graphql/AuthorizeTypes
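A minimal illustration of the nil-safe mapping introduced above (hypothetical values; this diff does not show how `job_token_policies` is stored, so the string-array assumption is mine):

```ruby
# Hypothetical data: policies persisted as strings, possibly nil.
nil&.map(&:to_sym)                         # => nil, instead of raising NoMethodError
%w[read_jobs admin_packages].map(&:to_sym) # => [:read_jobs, :admin_packages]
```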

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
module Types
module Ci
module JobTokenScope
# rubocop: disable Graphql/AuthorizeTypes -- this is static data
class JobTokenPolicyCategoryType < BaseObject
graphql_name 'JobTokenPolicyCategory'
description 'Job token policy category type'
field :description, GraphQL::Types::String, description: 'Description of the category.'
field :policies, [Types::Ci::JobTokenScope::JobTokenPolicyType], description: 'Policies of the category.'
field :text, GraphQL::Types::String, description: 'Display text of the category.'
field :value, Types::Ci::JobTokenScope::PolicyCategoriesEnum, description: 'Value of the category.'
end
# rubocop: enable Graphql/AuthorizeTypes
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
module Types
module Ci
module JobTokenScope
# rubocop: disable Graphql/AuthorizeTypes -- this is static data
class JobTokenPolicyType < BaseObject
graphql_name 'JobTokenPolicy'
description 'Job token policy'
field :description, GraphQL::Types::String, description: 'Description of the job token policy.'
field :text, GraphQL::Types::String, description: 'Display text of the job token policy.'
field :type, Types::Ci::JobTokenScope::PolicyTypesEnum, description: 'Job token policy type.'
field :value, Types::Ci::JobTokenScope::PoliciesEnum, description: 'Value of the job token policy.'
end
# rubocop: enable Graphql/AuthorizeTypes
end
end
end

View File

@ -7,8 +7,8 @@ module Types
graphql_name 'CiJobTokenScopePolicies'
description 'CI_JOB_TOKEN policy'
::Ci::JobToken::Policies.all_values.each do |policy|
value policy.upcase, value: policy, description: policy.titleize
::Ci::JobToken::Policies.all_policies.each do |policy|
value policy[:value].to_s.upcase, value: policy[:value], description: policy[:description]
end
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Types
module Ci
module JobTokenScope
class PolicyCategoriesEnum < BaseEnum
graphql_name 'CiJobTokenScopePolicyCategoriesTypes'
description 'CI_JOB_TOKEN policy category type'
::Ci::JobToken::Policies::POLICIES_BY_CATEGORY.each do |category|
value category[:value].to_s.upcase, value: category[:value], description: category[:description]
end
end
end
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module Types
module Ci
module JobTokenScope
class PolicyTypesEnum < BaseEnum
graphql_name 'CiJobTokenScopePolicyTypes'
description 'CI_JOB_TOKEN policy type'
value 'READ', value: :read, description: 'Read-only access to the resource.'
value 'ADMIN', value: :admin, description: 'Admin access to the resource.'
end
end
end
end

View File

@ -245,6 +245,10 @@ module Types
description: 'Check if a feature flag is enabled',
resolver: Resolvers::FeatureFlagResolver
field :job_token_policies_by_category, [::Types::Ci::JobTokenScope::JobTokenPolicyCategoryType],
description: 'List of job token policies for use with fine-grained permissions on CI/CD job allowlist.',
experiment: { milestone: '17.7' }
def design_management
DesignManagementObject.new(nil)
end
@ -298,6 +302,10 @@ module Types
stage
end
def job_token_policies_by_category
::Ci::JobToken::Policies::POLICIES_BY_CATEGORY
end
end
end
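The new enum and object types read `[:value]`, `[:description]`, and the nested `policies` from each entry of `Ci::JobToken::Policies::POLICIES_BY_CATEGORY`. The constant itself is not part of this diff; the following is a hypothetical sketch of the shape those field definitions assume, with keys mirroring the GraphQL fields declared above:

```ruby
# Sketch only — not the actual constant. Keys mirror the GraphQL fields
# (category: value, text, description, policies / policy: value, text, description, type).
POLICIES_BY_CATEGORY = [
  {
    value: :packages,
    text: 'Packages',
    description: 'Packages category.',
    policies: [
      { value: :read_packages,  text: 'Read packages',  description: 'Read packages.',  type: :read },
      { value: :admin_packages, text: 'Admin packages', description: 'Admin packages.', type: :admin }
    ]
  }
  # ...one entry per category (containers, deployments, environments, jobs, releases, ...)
]
```

`Policies.all_policies`, used by `PoliciesEnum`, presumably flattens the per-category `policies` arrays into a single list.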

View File

@ -439,6 +439,7 @@ module ProjectsHelper
end
def show_lfs_misconfiguration_banner?(project)
return false unless Feature.enabled?(:lfs_misconfiguration_banner)
return false unless project.repository
return false unless project.lfs_enabled?

View File

@ -64,6 +64,7 @@ module Ci
def log_pipeline_being_canceled
Gitlab::AppJsonLogger.info(
class: self.class.to_s,
event: 'pipeline_cancel_running',
pipeline_id: pipeline.id,
auto_canceled_by_pipeline_id: @auto_canceled_by_pipeline&.id,

View File

@ -5,41 +5,22 @@
"items": {
"type": "string",
"enum": [
"admin_container_image",
"admin_secure_files",
"admin_terraform_state",
"build_create_container_image",
"build_destroy_container_image",
"build_download_code",
"build_push_code",
"build_read_container_image",
"create_deployment",
"create_environment",
"create_on_demand_dast_scan",
"create_package",
"create_release",
"destroy_container_image",
"destroy_deployment",
"destroy_environment",
"destroy_package",
"destroy_release",
"read_build",
"read_container_image",
"read_deployment",
"read_environment",
"read_group",
"read_job_artifacts",
"read_package",
"read_pipeline",
"read_project",
"read_release",
"read_containers",
"admin_containers",
"read_deployments",
"admin_deployments",
"read_environments",
"admin_environments",
"read_jobs",
"admin_jobs",
"read_packages",
"admin_packages",
"read_releases",
"admin_releases",
"read_secure_files",
"admin_secure_files",
"read_terraform_state",
"stop_environment",
"update_deployment",
"update_environment",
"update_pipeline",
"update_release"
"admin_terraform_state"
]
},
"uniqueItems": true,

View File

@ -0,0 +1,31 @@
diff --git a/app/helpers/projects_helper.rb b/app/helpers/projects_helper.rb
index 04591476b72d..aecb81bb664b 100644
--- a/app/helpers/projects_helper.rb
+++ b/app/helpers/projects_helper.rb
@@ -404,7 +404,6 @@ def show_terraform_banner?(project)
end
def show_lfs_misconfiguration_banner?(project)
- return false unless Feature.enabled?(:lfs_misconfiguration_banner)
return false unless project.repository
return false unless project.lfs_enabled?
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index a6ea3ec16179..ea2fdf3f4ba7 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -903,14 +903,6 @@ def license_name
it { is_expected.to be_falsey }
end
-
- context 'when lfs_misconfiguration_banner feature flag is disabled' do
- before do
- stub_feature_flags(lfs_misconfiguration_banner: false)
- end
-
- it { is_expected.to be_falsey }
- end
end
context 'when it does have a .gitattributes file' do

View File

@ -0,0 +1,9 @@
---
name: lfs_misconfiguration_banner
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/429467
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162123
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/477892
milestone: '17.4'
group: group::source code
type: gitlab_com_derisk
default_enabled: false

View File

@ -1,13 +1,10 @@
import path from 'node:path';
import { fileURLToPath } from 'node:url';
/* eslint-disable import/extensions */
import {
resolveCompilationTargetsForVite,
resolveLoadPaths,
} from '../../scripts/frontend/lib/compile_css.mjs';
/* eslint-enable import/extensions */
const ROOT_PATH = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../');
const ROOT_PATH = path.resolve(import.meta.dirname, '../../');
/**
* This Plugin provides virtual entrypoints for our SCSS files

View File

@ -1,77 +0,0 @@
# frozen_string_literal: true
return unless Gitlab::Runtime.puma?
require 'puma'
require 'puma/cluster'
# Ruby 3.1 and 3.2 have bugs that prevent Puma from reaping child processes properly:
# https://bugs.ruby-lang.org/issues/20490
# https://bugs.ruby-lang.org/issues/19837
#
# https://github.com/puma/puma/pull/3314 fixes this in Puma, but a release
# has not been forthcoming.
if Gem::Version.new(Puma::Const::PUMA_VERSION) > Gem::Version.new('6.5')
raise 'This patch should not be needed after Puma 6.5.0.'
end
# rubocop:disable Style/RedundantBegin -- These are upstream changes
# rubocop:disable Cop/LineBreakAfterGuardClauses -- These are upstream changes
# rubocop:disable Layout/EmptyLineAfterGuardClause -- These are upstream changes
module Puma
class Cluster < Runner
# loops thru @workers, removing workers that exited, and calling
# `#term` if needed
def wait_workers
# Reap all children, known workers or otherwise.
# If puma has PID 1, as it's common in containerized environments,
# then it's responsible for reaping orphaned processes, so we must reap
# all our dead children, regardless of whether they are workers we spawned
# or some reattached processes.
reaped_children = {}
loop do
begin
pid, status = Process.wait2(-1, Process::WNOHANG)
break unless pid
reaped_children[pid] = status
rescue Errno::ECHILD
break
end
end
@workers.reject! do |w|
next false if w.pid.nil?
begin
# We may need to check the PID individually because:
# 1. From Ruby versions 2.6 to 3.2, `Process.detach` can prevent or delay
# `Process.wait2(-1)` from detecting a terminated process: https://bugs.ruby-lang.org/issues/19837.
# 2. When `fork_worker` is enabled, some worker may not be direct children,
# but grand children. Because of this they won't be reaped by `Process.wait2(-1)`.
if reaped_children.delete(w.pid) || Process.wait(w.pid, Process::WNOHANG)
true
else
w.term if w.term?
nil
end
rescue Errno::ECHILD
begin
Process.kill(0, w.pid)
# child still alive but has another parent (e.g., using fork_worker)
w.term if w.term?
false
rescue Errno::ESRCH, Errno::EPERM
true # child is already terminated
end
end
end
# Log unknown children
reaped_children.each do |pid, status|
log "! reaped unknown child process pid=#{pid} status=#{status}"
end
end
end
end
# rubocop:enable Style/RedundantBegin
# rubocop:enable Cop/LineBreakAfterGuardClauses
# rubocop:enable Layout/EmptyLineAfterGuardClause

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillGroupWikiRepositoryStatesGroupId
description: Backfills sharding key `group_wiki_repository_states.group_id` from `group_wiki_repositories`.
feature_category: geo_replication
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169242
milestone: '17.7'
queued_migration_version: 20241125133627
finalized_by: # version of the migration that finalized this BBM

View File

@ -19,3 +19,4 @@ desired_sharding_key:
sharding_key: group_id
belongs_to: group_wiki_repository
table_size: small
desired_sharding_key_migration_job_name: BackfillGroupWikiRepositoryStatesGroupId

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
class AddRoleApproversToApprovalMergeRequestRules < Gitlab::Database::Migration[2.2]
milestone '17.6'
milestone '17.7'
disable_ddl_transaction!
CONSTRAINT_NAME = 'check_approval_m_r_rules_allowed_role_approvers_valid_entries'

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddGroupIdToGroupWikiRepositoryStates < Gitlab::Database::Migration[2.2]
milestone '17.7'
def change
add_column :group_wiki_repository_states, :group_id, :bigint
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexGroupWikiRepositoryStatesOnGroupId < Gitlab::Database::Migration[2.2]
milestone '17.7'
disable_ddl_transaction!
INDEX_NAME = 'index_group_wiki_repository_states_on_group_id'
def up
add_concurrent_index :group_wiki_repository_states, :group_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :group_wiki_repository_states, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddGroupWikiRepositoryStatesGroupIdFk < Gitlab::Database::Migration[2.2]
milestone '17.7'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :group_wiki_repository_states, :namespaces, column: :group_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :group_wiki_repository_states, column: :group_id
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class AddGroupWikiRepositoryStatesGroupIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.7'
def up
install_sharding_key_assignment_trigger(
table: :group_wiki_repository_states,
sharding_key: :group_id,
parent_table: :group_wiki_repositories,
parent_table_primary_key: :group_id,
parent_sharding_key: :group_id,
foreign_key: :group_wiki_repository_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :group_wiki_repository_states,
sharding_key: :group_id,
parent_table: :group_wiki_repositories,
parent_table_primary_key: :group_id,
parent_sharding_key: :group_id,
foreign_key: :group_wiki_repository_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillGroupWikiRepositoryStatesGroupId < Gitlab::Database::Migration[2.2]
milestone '17.7'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillGroupWikiRepositoryStatesGroupId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:group_wiki_repository_states,
:id,
:group_id,
:group_wiki_repositories,
:group_id,
:group_wiki_repository_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:group_wiki_repository_states,
:id,
[
:group_id,
:group_wiki_repositories,
:group_id,
:group_wiki_repository_id
]
)
end
end
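The queued `BackfillGroupWikiRepositoryStatesGroupId` job class is not shown in this diff. Based on GitLab's usual pattern for desired-sharding-key backfills, it is likely a thin subclass along these lines (a sketch under that assumption, not the actual file):

```ruby
# frozen_string_literal: true

# Sketch of the batched background migration referenced by MIGRATION above,
# assuming the conventional BackfillDesiredShardingKeyJob base class.
module Gitlab
  module BackgroundMigration
    class BackfillGroupWikiRepositoryStatesGroupId < BackfillDesiredShardingKeyJob
      operation_name :backfill_group_wiki_repository_states_group_id
      feature_category :geo_replication
    end
  end
end
```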

View File

@ -0,0 +1 @@
a343179b4ff10dc52c123c4c16eae512dac082f1e732905b7b9d1ed797cadd39

View File

@ -0,0 +1 @@
f3a17c383994b89d08effcf7858d5f62b34f00a3e88d0585b55ce1b2d2b98200

View File

@ -0,0 +1 @@
5b2655c6752ea3d16426262d7ba7182c4ce54445bfad284e177f4f295b1879fa

View File

@ -0,0 +1 @@
10b4dbd09a1888c465a7899a6dcaf4875873980fbf93fd238b384136fd012b57

View File

@ -0,0 +1 @@
3cc2c84bc6f4ca87de823ad9ecaed8f4bc4699d65d272793fb9d5995a3c1e4ac

View File

@ -2337,6 +2337,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_a22be47501db() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."group_id" IS NULL THEN
SELECT "group_id"
INTO NEW."group_id"
FROM "group_wiki_repositories"
WHERE "group_wiki_repositories"."group_id" = NEW."group_wiki_repository_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_a253cb3cacdf() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -12763,6 +12779,7 @@ CREATE TABLE group_wiki_repository_states (
verification_retry_count smallint DEFAULT 0 NOT NULL,
verification_checksum bytea,
verification_failure text,
group_id bigint,
CONSTRAINT check_14d288436d CHECK ((char_length(verification_failure) <= 255))
);
@ -30320,6 +30337,8 @@ CREATE INDEX index_group_wiki_repository_states_failed_verification ON group_wik
CREATE INDEX index_group_wiki_repository_states_needs_verification ON group_wiki_repository_states USING btree (verification_state) WHERE ((verification_state = 0) OR (verification_state = 3));
CREATE INDEX index_group_wiki_repository_states_on_group_id ON group_wiki_repository_states USING btree (group_id);
CREATE UNIQUE INDEX index_group_wiki_repository_states_on_group_wiki_repository_id ON group_wiki_repository_states USING btree (group_wiki_repository_id);
CREATE INDEX index_group_wiki_repository_states_on_verification_state ON group_wiki_repository_states USING btree (verification_state);
@ -35206,6 +35225,8 @@ CREATE TRIGGER trigger_9f3de326ea61 BEFORE INSERT OR UPDATE ON ci_pipeline_sched
CREATE TRIGGER trigger_a1bc7c70cbdf BEFORE INSERT OR UPDATE ON vulnerability_user_mentions FOR EACH ROW EXECUTE FUNCTION trigger_a1bc7c70cbdf();
CREATE TRIGGER trigger_a22be47501db BEFORE INSERT OR UPDATE ON group_wiki_repository_states FOR EACH ROW EXECUTE FUNCTION trigger_a22be47501db();
CREATE TRIGGER trigger_a253cb3cacdf BEFORE INSERT OR UPDATE ON dora_daily_metrics FOR EACH ROW EXECUTE FUNCTION trigger_a253cb3cacdf();
CREATE TRIGGER trigger_a465de38164e BEFORE INSERT OR UPDATE ON ci_job_artifact_states FOR EACH ROW EXECUTE FUNCTION trigger_a465de38164e();
@ -35934,6 +35955,9 @@ ALTER TABLE ONLY dast_profile_schedules
ALTER TABLE ONLY events
ADD CONSTRAINT fk_61fbf6ca48 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY group_wiki_repository_states
ADD CONSTRAINT fk_621768bf3d FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_reads
ADD CONSTRAINT fk_62736f638f FOREIGN KEY (vulnerability_id) REFERENCES vulnerabilities(id) ON DELETE CASCADE;

View File

@ -50,49 +50,6 @@ If the secondary site is not able to reconnect, use the following steps to remov
1. Follow either the steps [to remove that Geo site](../remove_geo_site.md) if it's no longer required,
or [re-initiate the replication process](../../setup/database.md#step-3-initiate-the-replication-process), which recreates the replication slot correctly.
### Message: `"Error during verification","error":"File is not checksummable"`
If you encounter these errors in your primary site `geo.log`, they're also reflected in the UI under **Admin > Geo > Sites**. To remove those errors, you can identify the particular blob that generates the message so that you can inspect it.
1. In a Puma or Sidekiq node in the primary site, [open a Rails console](../../../../administration/operations/rails_console.md#starting-a-rails-console-session).
1. Run the following snippet to find the affected artifacts containing the `File is not checksummable` message:
NOTE:
The example provided below uses the `JobArtifact` blob type; however, the same solution applies to any blob type that Geo uses.
```ruby
artifacts = Ci::JobArtifact.verification_failed.where("verification_failure like '%File is not checksummable%'");1
puts "Found #{artifacts.count} artifacts that failed verification with 'File is not checksummable'. The first one:"
pp artifacts.first
```
If you determine that the affected files need to be recovered, you can explore these options (non-exhaustive) to recover the missing files:
- Check if the secondary site has the object and manually copy them to the primary.
- Look through old backups and manually copy the object back into the primary site.
- Spot check some of the records to determine whether it is acceptable to destroy them; for example, if they are all very old artifacts, they may not be critical data.
Often, these kinds of errors happen when a file is checksummed by Geo, and then goes missing from the primary site. After you identify the affected files, you should check the projects that the files belong to from the UI to decide if it's acceptable to delete the file reference. If so, you can destroy the references with the following irreversible snippet:
```ruby
def destroy_artifacts_not_checksummable
artifacts = Ci::JobArtifact.verification_failed.where("verification_failure like '%File is not checksummable%'");1
puts "Found #{artifacts.count} artifacts that failed verification with 'File is not checksummable'."
puts "Enter 'y' to continue: "
prompt = STDIN.gets.chomp
if prompt != 'y'
puts "Exiting without action..."
return
end
puts "Destroying all..."
artifacts.destroy_all
end
destroy_artifacts_not_checksummable
```
## Message: `WARNING: oldest xmin is far in the past` and `pg_wal` size growing
If a replication slot is inactive,

View File

@ -279,6 +279,49 @@ end
p "#{uploads_deleted} remote objects were destroyed."
```
### Message: `"Error during verification","error":"File is not checksummable"`
If you encounter these errors in your primary site `geo.log`, they're also reflected in the UI under **Admin > Geo > Sites**. To remove those errors, you can identify the particular blob that generates the message so that you can inspect it.
1. In a Puma or Sidekiq node in the primary site, [open a Rails console](../../../../administration/operations/rails_console.md#starting-a-rails-console-session).
1. Run the following snippet to find the affected artifacts containing the `File is not checksummable` message:
NOTE:
The example provided below uses the `JobArtifact` blob type; however, the same solution applies to any blob type that Geo uses.
```ruby
artifacts = Ci::JobArtifact.verification_failed.where("verification_failure like '%File is not checksummable%'");1
puts "Found #{artifacts.count} artifacts that failed verification with 'File is not checksummable'. The first one:"
pp artifacts.first
```
If you determine that the affected files need to be recovered, you can explore these options (non-exhaustive) to recover the missing files:
- Check if the secondary site has the object and manually copy them to the primary.
- Look through old backups and manually copy the object back into the primary site.
- Spot check some of the records to determine whether it is acceptable to destroy them; for example, if they are all very old artifacts, they may not be critical data.
Often, these kinds of errors happen when a file is checksummed by Geo, and then goes missing from the primary site. After you identify the affected files, you should check the projects that the files belong to from the UI to decide if it's acceptable to delete the file reference. If so, you can destroy the references with the following irreversible snippet:
```ruby
def destroy_artifacts_not_checksummable
artifacts = Ci::JobArtifact.verification_failed.where("verification_failure like '%File is not checksummable%'");1
puts "Found #{artifacts.count} artifacts that failed verification with 'File is not checksummable'."
puts "Enter 'y' to continue: "
prompt = STDIN.gets.chomp
if prompt != 'y'
puts "Exiting without action..."
return
end
puts "Destroying all..."
artifacts.destroy_all
end
destroy_artifacts_not_checksummable
```
### Error: `Error syncing repository: 13:fatal: could not read Username`
The `last_sync_failure` error

View File

@ -725,6 +725,16 @@ Returns [`Iteration`](#iteration).
| ---- | ---- | ----------- |
| <a id="queryiterationid"></a>`id` | [`IterationID!`](#iterationid) | Find an iteration by its ID. |
### `Query.jobTokenPoliciesByCategory`
List of job token policies for use with fine-grained permissions on CI/CD job allowlist.
DETAILS:
**Introduced** in GitLab 17.7.
**Status**: Experiment.
Returns [`[JobTokenPolicyCategory!]`](#jobtokenpolicycategory).
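A minimal Rails-console sketch of calling this experimental field (assuming console access to the schema; field names come from the `JobTokenPolicyCategory` and `JobTokenPolicy` type documentation below):

```ruby
# Query the new experimental field through the Ruby schema object.
query = <<~GRAPHQL
  {
    jobTokenPoliciesByCategory {
      text
      value
      policies { text value type }
    }
  }
GRAPHQL

result = GitlabSchema.execute(query, context: { current_user: User.first })
pp result.to_h.dig('data', 'jobTokenPoliciesByCategory')
```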
### `Query.jobs`
All jobs on this GitLab instance. Returns an empty result for users without administrator access.
@ -26868,6 +26878,32 @@ Represents the Geo replication and verification state of a job_artifact.
| <a id="jobpermissionsreadjobartifacts"></a>`readJobArtifacts` | [`Boolean!`](#boolean) | If `true`, the user can perform `read_job_artifacts` on this resource. |
| <a id="jobpermissionsupdatebuild"></a>`updateBuild` | [`Boolean!`](#boolean) | If `true`, the user can perform `update_build` on this resource. |
### `JobTokenPolicy`
Job token policy.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="jobtokenpolicydescription"></a>`description` | [`String`](#string) | Description of the job token policy. |
| <a id="jobtokenpolicytext"></a>`text` | [`String`](#string) | Display text of the job token policy. |
| <a id="jobtokenpolicytype"></a>`type` | [`CiJobTokenScopePolicyTypes`](#cijobtokenscopepolicytypes) | Job token policy type. |
| <a id="jobtokenpolicyvalue"></a>`value` | [`CiJobTokenScopePolicies`](#cijobtokenscopepolicies) | Value of the job token policy. |
### `JobTokenPolicyCategory`
Job token policy category type.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="jobtokenpolicycategorydescription"></a>`description` | [`String`](#string) | Description of the category. |
| <a id="jobtokenpolicycategorypolicies"></a>`policies` | [`[JobTokenPolicy!]`](#jobtokenpolicy) | Policies of the category. |
| <a id="jobtokenpolicycategorytext"></a>`text` | [`String`](#string) | Display text of the category. |
| <a id="jobtokenpolicycategoryvalue"></a>`value` | [`CiJobTokenScopePolicyCategoriesTypes`](#cijobtokenscopepolicycategoriestypes) | Value of the category. |
### `Kas`
#### Fields
@ -36088,6 +36124,7 @@ Represents a vulnerability.
| <a id="vulnerabilityprimaryidentifier"></a>`primaryIdentifier` | [`VulnerabilityIdentifier`](#vulnerabilityidentifier) | Primary identifier of the vulnerability. |
| <a id="vulnerabilityproject"></a>`project` | [`Project`](#project) | Project on which the vulnerability was found. |
| <a id="vulnerabilityreporttype"></a>`reportType` | [`VulnerabilityReportType`](#vulnerabilityreporttype) | Type of the security report that found the vulnerability (SAST, DEPENDENCY_SCANNING, CONTAINER_SCANNING, DAST, SECRET_DETECTION, COVERAGE_FUZZING, API_FUZZING, CLUSTER_IMAGE_SCANNING, CONTAINER_SCANNING_FOR_REGISTRY, GENERIC). `Scan Type` in the UI. |
| <a id="vulnerabilityrepresentationinformation"></a>`representationInformation` **{warning-solid}** | [`VulnerabilityRepresentationInformation`](#vulnerabilityrepresentationinformation) | **Introduced** in GitLab 17.7. **Status**: Experiment. Information about the representation of the vulnerability, such as resolved commit SHA. |
| <a id="vulnerabilityresolvedat"></a>`resolvedAt` | [`Time`](#time) | Timestamp of when the vulnerability state was changed to resolved. |
| <a id="vulnerabilityresolvedby"></a>`resolvedBy` | [`UserCore`](#usercore) | User that resolved the vulnerability. |
| <a id="vulnerabilityresolvedondefaultbranch"></a>`resolvedOnDefaultBranch` | [`Boolean!`](#boolean) | Indicates whether the vulnerability is fixed on the default branch or not. |
@ -36631,6 +36668,16 @@ Represents a vulnerability remediation type.
| <a id="vulnerabilityremediationtypediff"></a>`diff` | [`String`](#string) | Diff of the remediation. |
| <a id="vulnerabilityremediationtypesummary"></a>`summary` | [`String`](#string) | Summary of the remediation. |
### `VulnerabilityRepresentationInformation`
Represents vulnerability information.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="vulnerabilityrepresentationinformationresolvedincommitsha"></a>`resolvedInCommitSha` | [`String`](#string) | SHA of the commit where the vulnerability was resolved. |
### `VulnerabilityRequest`
Represents a Vulnerability Request.
@ -37999,41 +38046,46 @@ CI_JOB_TOKEN policy.
| Value | Description |
| ----- | ----------- |
| <a id="cijobtokenscopepoliciesadmin_container_image"></a>`ADMIN_CONTAINER_IMAGE` | Admin Container Image. |
| <a id="cijobtokenscopepoliciesadmin_secure_files"></a>`ADMIN_SECURE_FILES` | Admin Secure Files. |
| <a id="cijobtokenscopepoliciesadmin_terraform_state"></a>`ADMIN_TERRAFORM_STATE` | Admin Terraform State. |
| <a id="cijobtokenscopepoliciesbuild_create_container_image"></a>`BUILD_CREATE_CONTAINER_IMAGE` | Build Create Container Image. |
| <a id="cijobtokenscopepoliciesbuild_destroy_container_image"></a>`BUILD_DESTROY_CONTAINER_IMAGE` | Build Destroy Container Image. |
| <a id="cijobtokenscopepoliciesbuild_download_code"></a>`BUILD_DOWNLOAD_CODE` | Build Download Code. |
| <a id="cijobtokenscopepoliciesbuild_push_code"></a>`BUILD_PUSH_CODE` | Build Push Code. |
| <a id="cijobtokenscopepoliciesbuild_read_container_image"></a>`BUILD_READ_CONTAINER_IMAGE` | Build Read Container Image. |
| <a id="cijobtokenscopepoliciescreate_deployment"></a>`CREATE_DEPLOYMENT` | Create Deployment. |
| <a id="cijobtokenscopepoliciescreate_environment"></a>`CREATE_ENVIRONMENT` | Create Environment. |
| <a id="cijobtokenscopepoliciescreate_on_demand_dast_scan"></a>`CREATE_ON_DEMAND_DAST_SCAN` | Create On Demand Dast Scan. |
| <a id="cijobtokenscopepoliciescreate_package"></a>`CREATE_PACKAGE` | Create Package. |
| <a id="cijobtokenscopepoliciescreate_release"></a>`CREATE_RELEASE` | Create Release. |
| <a id="cijobtokenscopepoliciesdestroy_container_image"></a>`DESTROY_CONTAINER_IMAGE` | Destroy Container Image. |
| <a id="cijobtokenscopepoliciesdestroy_deployment"></a>`DESTROY_DEPLOYMENT` | Destroy Deployment. |
| <a id="cijobtokenscopepoliciesdestroy_environment"></a>`DESTROY_ENVIRONMENT` | Destroy Environment. |
| <a id="cijobtokenscopepoliciesdestroy_package"></a>`DESTROY_PACKAGE` | Destroy Package. |
| <a id="cijobtokenscopepoliciesdestroy_release"></a>`DESTROY_RELEASE` | Destroy Release. |
| <a id="cijobtokenscopepoliciesread_build"></a>`READ_BUILD` | Read Build. |
| <a id="cijobtokenscopepoliciesread_container_image"></a>`READ_CONTAINER_IMAGE` | Read Container Image. |
| <a id="cijobtokenscopepoliciesread_deployment"></a>`READ_DEPLOYMENT` | Read Deployment. |
| <a id="cijobtokenscopepoliciesread_environment"></a>`READ_ENVIRONMENT` | Read Environment. |
| <a id="cijobtokenscopepoliciesread_group"></a>`READ_GROUP` | Read Group. |
| <a id="cijobtokenscopepoliciesread_job_artifacts"></a>`READ_JOB_ARTIFACTS` | Read Job Artifacts. |
| <a id="cijobtokenscopepoliciesread_package"></a>`READ_PACKAGE` | Read Package. |
| <a id="cijobtokenscopepoliciesread_pipeline"></a>`READ_PIPELINE` | Read Pipeline. |
| <a id="cijobtokenscopepoliciesread_project"></a>`READ_PROJECT` | Read Project. |
| <a id="cijobtokenscopepoliciesread_release"></a>`READ_RELEASE` | Read Release. |
| <a id="cijobtokenscopepoliciesread_secure_files"></a>`READ_SECURE_FILES` | Read Secure Files. |
| <a id="cijobtokenscopepoliciesread_terraform_state"></a>`READ_TERRAFORM_STATE` | Read Terraform State. |
| <a id="cijobtokenscopepoliciesstop_environment"></a>`STOP_ENVIRONMENT` | Stop Environment. |
| <a id="cijobtokenscopepoliciesupdate_deployment"></a>`UPDATE_DEPLOYMENT` | Update Deployment. |
| <a id="cijobtokenscopepoliciesupdate_environment"></a>`UPDATE_ENVIRONMENT` | Update Environment. |
| <a id="cijobtokenscopepoliciesupdate_pipeline"></a>`UPDATE_PIPELINE` | Update Pipeline. |
| <a id="cijobtokenscopepoliciesupdate_release"></a>`UPDATE_RELEASE` | Update Release. |
| <a id="cijobtokenscopepoliciesadmin_containers"></a>`ADMIN_CONTAINERS` | Admin container images in a project. |
| <a id="cijobtokenscopepoliciesadmin_deployments"></a>`ADMIN_DEPLOYMENTS` | Admin deployments in a project. |
| <a id="cijobtokenscopepoliciesadmin_environments"></a>`ADMIN_ENVIRONMENTS` | Admin + Stop environments in a project. |
| <a id="cijobtokenscopepoliciesadmin_jobs"></a>`ADMIN_JOBS` | Read job metadata, upload artifacts and update the pipeline status. |
| <a id="cijobtokenscopepoliciesadmin_packages"></a>`ADMIN_PACKAGES` | Admin packages. |
| <a id="cijobtokenscopepoliciesadmin_releases"></a>`ADMIN_RELEASES` | Admin releases in a project. |
| <a id="cijobtokenscopepoliciesadmin_secure_files"></a>`ADMIN_SECURE_FILES` | Admin secure files in a project. |
| <a id="cijobtokenscopepoliciesadmin_terraform_state"></a>`ADMIN_TERRAFORM_STATE` | Admin terraform state files/versions. |
| <a id="cijobtokenscopepoliciesread_containers"></a>`READ_CONTAINERS` | Read container images in a project. |
| <a id="cijobtokenscopepoliciesread_deployments"></a>`READ_DEPLOYMENTS` | Read deployments in a project. |
| <a id="cijobtokenscopepoliciesread_environments"></a>`READ_ENVIRONMENTS` | Read environments in a project. |
| <a id="cijobtokenscopepoliciesread_jobs"></a>`READ_JOBS` | Read job metadata and artifacts. |
| <a id="cijobtokenscopepoliciesread_packages"></a>`READ_PACKAGES` | Read packages. |
| <a id="cijobtokenscopepoliciesread_releases"></a>`READ_RELEASES` | Read releases in a project. |
| <a id="cijobtokenscopepoliciesread_secure_files"></a>`READ_SECURE_FILES` | Read secure files in a project. |
| <a id="cijobtokenscopepoliciesread_terraform_state"></a>`READ_TERRAFORM_STATE` | Read terraform state files/version. |
### `CiJobTokenScopePolicyCategoriesTypes`
CI_JOB_TOKEN policy category type.
| Value | Description |
| ----- | ----------- |
| <a id="cijobtokenscopepolicycategoriestypescontainers"></a>`CONTAINERS` | Containers category. |
| <a id="cijobtokenscopepolicycategoriestypesdeployments"></a>`DEPLOYMENTS` | Deployments category. |
| <a id="cijobtokenscopepolicycategoriestypesenvironments"></a>`ENVIRONMENTS` | Environments category. |
| <a id="cijobtokenscopepolicycategoriestypesjobs"></a>`JOBS` | Jobs category. |
| <a id="cijobtokenscopepolicycategoriestypespackages"></a>`PACKAGES` | Packages category. |
| <a id="cijobtokenscopepolicycategoriestypesreleases"></a>`RELEASES` | Releases category. |
| <a id="cijobtokenscopepolicycategoriestypessecure_files"></a>`SECURE_FILES` | Secure files category. |
| <a id="cijobtokenscopepolicycategoriestypesterraform_state"></a>`TERRAFORM_STATE` | Terraform state category. |
### `CiJobTokenScopePolicyTypes`
CI_JOB_TOKEN policy type.
| Value | Description |
| ----- | ----------- |
| <a id="cijobtokenscopepolicytypesadmin"></a>`ADMIN` | Admin access to the resource. |
| <a id="cijobtokenscopepolicytypesread"></a>`READ` | Read-only access to the resource. |
### `CiRunnerAccessLevel`

View File

@ -94,7 +94,7 @@ curl --header "Authorization:<personal_access_token>" "https://gitlab.example.co
This endpoint returns a marshalled array of hashes for all versions of the requested gems. Since the
response is marshalled, you can store it in a file. If Ruby is installed, you can use the following
Ruby command to read the response. For this to work, you must
[set your credentials in `~/.gem/credentials`](../../user/packages/rubygems_registry/index.md#authenticate-with-a-personal-access-token-or-deploy-token):
[set your credentials in `~/.gem/credentials`](../../user/packages/rubygems_registry/index.md#authenticate-to-the-package-registry):
```shell
$ ruby -ropen-uri -rpp -e \

View File

@ -349,7 +349,9 @@ artifact and existing [requirements](../../user/project/requirements/index.md) a
GitLab can display the results of one or more reports in the
[project requirements](../../user/project/requirements/index.md#view-a-requirement).
## `artifacts:reports:repository_xray`
<!--- start_remove The following content will be removed on remove_date: '2025-08-15' -->
## `artifacts:reports:repository_xray` (deprecated)
DETAILS:
**Tier:** Premium, Ultimate
@ -358,6 +360,12 @@ DETAILS:
The `repository_xray` report collects information about your repository for use by GitLab Duo Code Suggestions.
WARNING:
This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/500146) in GitLab 17.6
and is planned for removal in 18.0. Use [Enable Repository X-Ray](../../user/project/repository/code_suggestions/repository_xray.md#enable-repository-x-ray) instead.
<!--- end_remove -->
## `artifacts:reports:sast`
The `sast` report collects [SAST vulnerabilities](../../user/application_security/sast/index.md).

View File

@ -123,6 +123,9 @@ If you are an administrator, you can create group access tokens in the Rails con
> - Ability to view revoked tokens [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/462217) in GitLab 17.3 [with a flag](../../../administration/feature_flags.md) named `retain_resource_access_token_user_after_revoke`. Disabled by default.
FLAG:
The ability to view revoked tokens is controlled by a feature flag. For more information, see the history.
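For example, an administrator with access to the Rails console could enable the flag with a command like the one below (a minimal sketch; the flag name comes from the history note above):
```ruby
Feature.enable(:retain_resource_access_token_user_after_revoke)

# To turn it back off:
Feature.disable(:retain_resource_access_token_user_after_revoke)
```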
In GitLab 17.3 and later, if you enable the `retain_resource_access_token_user_after_revoke`
feature flag, you can view both active and inactive revoked group access tokens
on the access tokens page. If you do not enable the feature flag, you can only view

View File

@ -11,50 +11,21 @@ DETAILS:
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
**Status:** Experiment
WARNING:
The Ruby gems package registry for GitLab is under development and isn't ready for production use due to
limited functionality. This [epic](https://gitlab.com/groups/gitlab-org/-/epics/3200) details the remaining
work and timelines to make it production ready.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/52147) in GitLab 13.9 [with a flag](../../../administration/feature_flags.md) named `rubygem_packages`. Disabled by default. This feature is an [experiment](../../../policy/experiment-beta-support.md).
You can publish Ruby gems in your project's package registry, then install the packages when you
need to use them as a dependency. Although you can push gems to the registry, you cannot install
them from the registry. However, you can download `gem` files directly from the package registry's
UI, or by using the [API](../../../api/packages/rubygems.md#download-a-gem-file).
FLAG:
The availability of this feature is controlled by a feature flag.
For more information, see the history.
This feature is available for testing, but not ready for production use.
For documentation of the specific API endpoints that the Ruby gems and Bundler package manager
clients use, see the [Ruby gems API documentation](../../../api/packages/rubygems.md).
You can publish Ruby gems to your project's package registry. Then, you can download them from the UI or with the API.
## Enable the Ruby gems registry
The Ruby gems registry for GitLab is behind a feature flag that is disabled by default. GitLab
administrators with access to the GitLab Rails console can enable this registry for your instance.
To enable it:
```ruby
Feature.enable(:rubygem_packages)
```
To disable it:
```ruby
Feature.disable(:rubygem_packages)
```
To enable or disable it for specific projects:
```ruby
Feature.enable(:rubygem_packages, Project.find(1))
Feature.disable(:rubygem_packages, Project.find(2))
```
## Create a Ruby gem
If you need help creating a Ruby gem, see the [RubyGems documentation](https://guides.rubygems.org/make-your-own-gem/).
This feature is an [experiment](../../../policy/experiment-beta-support.md).
For more information about the development of this feature, see [epic 3200](https://gitlab.com/groups/gitlab-org/-/epics/3200).
## Authenticate to the package registry
Before you can push to the package registry, you must authenticate.
Before you can interact with the package registry, you must authenticate to it.
To do this, you can use:
@ -62,89 +33,109 @@ To do this, you can use:
with the scope set to `api`.
- A [deploy token](../../project/deploy_tokens/index.md) with the scope set to
`read_package_registry`, `write_package_registry`, or both.
- A [CI job token](#authenticate-with-a-ci-job-token).
### Authenticate with a personal access token or deploy token
To authenticate with a personal access token, create or edit the `~/.gem/credentials` file and add:
```ini
---
https://gitlab.example.com/api/v4/projects/<project_id>/packages/rubygems: '<your token>'
```
- `<your token>` must be the token value of either your personal access token or deploy token.
- Your project ID is displayed on the [project overview page](../../project/working_with_projects.md#access-a-project-by-using-the-project-id).
### Authenticate with a CI job token
To work with RubyGems commands within [GitLab CI/CD](../../../ci/index.md),
you can use the [`CI_JOB_TOKEN`](../../../ci/jobs/ci_job_token.md) predefined environment variable instead of a personal access token or deploy token.
- A [CI/CD job token](../../../ci/jobs/ci_job_token.md).
For example:
```yaml
# assuming a my_gem.gemspec file is present in the repository with the version currently set to 0.0.1
image: ruby
::Tabs
run:
before_script:
- mkdir ~/.gem
- echo "---" > ~/.gem/credentials
- |
echo "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/rubygems: '${CI_JOB_TOKEN}'" >> ~/.gem/credentials
- chmod 0600 ~/.gem/credentials # rubygems requires 0600 permissions on the credentials file
script:
- gem build my_gem
- gem push my_gem-0.0.1.gem --host ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/rubygems
```
:::TabTitle With an access token
You can also use `CI_JOB_TOKEN` in a `~/.gem/credentials` file that you check in to
GitLab:
To authenticate with an access token:
```ini
---
https://gitlab.example.com/api/v4/projects/${env.CI_PROJECT_ID}/packages/rubygems: '${env.CI_JOB_TOKEN}'
```
- Create or edit your `~/.gem/credentials` file, and add:
```ini
---
https://gitlab.example.com/api/v4/projects/<project_id>/packages/rubygems: '<token>'
```
In this example:
- `<token>` must be the token value of either your personal access token or deploy token.
- `<project_id>` is displayed on the [project overview page](../../project/working_with_projects.md#access-a-project-by-using-the-project-id).
:::TabTitle With a CI/CD job token
To authenticate with a CI/CD job token:
- Create or edit your `.gitlab-ci.yml` file, and add:
```yaml
# assuming a my_gem.gemspec file is present in the repository with the version currently set to 0.0.1
image: ruby
run:
before_script:
- mkdir ~/.gem
- echo "---" > ~/.gem/credentials
- |
echo "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/rubygems: '${CI_JOB_TOKEN}'" >> ~/.gem/credentials
- chmod 0600 ~/.gem/credentials # rubygems requires 0600 permissions on the credentials file
script:
- gem build my_gem
- gem push my_gem-0.0.1.gem --host ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/rubygems
```
You can also use `CI_JOB_TOKEN` in a `~/.gem/credentials` file you check in to GitLab:
```ini
---
https://gitlab.example.com/api/v4/projects/${env.CI_PROJECT_ID}/packages/rubygems: '${env.CI_JOB_TOKEN}'
```
::EndTabs
## Push a Ruby gem
Prerequisites:
- You must [authenticate to the package registry](#authenticate-to-the-package-registry).
- The maximum allowed gem size is 3 GB.
- Your Ruby gem must be 3 GB or less.
To push your gem, run a command like this one:
To do this:
```shell
gem push my_gem-0.0.1.gem --host <host>
```
- Run a command like:
`<host>` is the URL you used when setting up authentication. For example:
```shell
gem push my_gem-0.0.1.gem --host <host>
```
```shell
gem push my_gem-0.0.1.gem --host https://gitlab.example.com/api/v4/projects/1/packages/rubygems
```
In this example, `<host>` is the URL you used when setting up authentication. For example:
This message indicates that the gem uploaded successfully:
```shell
gem push my_gem-0.0.1.gem --host https://gitlab.example.com/api/v4/projects/1/packages/rubygems
```
When a gem is published successfully, a message like this is displayed:
```plaintext
Pushing gem to https://gitlab.example.com/api/v4/projects/1/packages/rubygems...
{"message":"201 Created"}
```
To view the published gem, go to your project's **Packages and registries** page. Gems pushed to
GitLab aren't displayed in your project's Packages UI immediately. It can take up to 10 minutes to
process a gem.
The gem is published to your package registry and is shown on the **Packages and registries** page.
It can take up to 10 minutes before GitLab processes and displays your gem.
### Pushing gems with the same name or version
You can push a gem if a package of the same name and version already exists.
Both are visible and accessible in the UI. However, only the most recently
pushed gem is used for installs.
Both are visible and accessible in the UI.
## Install a Ruby gem
## Download gems
The Ruby gems registry for GitLab is under development, and isn't ready for production use. You
cannot install Gems from the registry. However, you can download `.gem` files directly from the UI
or by using the [API](../../../api/packages/rubygems.md#download-a-gem-file).
You can't install Ruby gems from the GitLab package registry. However, you can download gem files for local use.
To do this:
1. On the left sidebar, select **Search or go to** and find your project.
1. Select **Deploy > Package registry**.
1. Select the package name and version.
1. Under **Assets**, select the Ruby gem you want to download.
To download Ruby gems, you can also [use the API](../../../api/packages/rubygems.md#download-a-gem-file).
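If you want to script the download, the following Ruby sketch shows one approach. It assumes the `gems/:file_name` download endpoint described on the linked API page and a personal access token or deploy token from the authentication section above; adjust the host, project ID, and file name for your project:
```ruby
# Sketch: download a gem file from the package registry with open-uri.
# The endpoint path is taken from the linked RubyGems API page; confirm it for your GitLab version.
require 'open-uri'

host       = 'https://gitlab.example.com'
project_id = 1
file_name  = 'my_gem-0.0.1.gem'
token      = ENV.fetch('GITLAB_TOKEN') # personal access token or deploy token value

url = "#{host}/api/v4/projects/#{project_id}/packages/rubygems/gems/#{file_name}"
File.binwrite(file_name, URI.open(url, 'Authorization' => token).read)
```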
## Related topics
- [Make your own gem](https://guides.rubygems.org/make-your-own-gem/)
- [Ruby gems API documentation](../../../api/packages/rubygems.md)

View File

@ -21,8 +21,8 @@ can then update the status of merge requests from outside of GitLab.
With this integration, you can connect third-party workflow tools, like
ServiceNow, or the custom tool of your choice. The third-party tool
respond with an associated status. This status is then displayed as a non-blocking
widget within the merge request to surface this status to the merge request author or reviewers
responds with an associated status. This status is then displayed as a non-blocking
widget within the merge request, which surfaces this status to the merge request author or reviewers
at the merge request level itself.
You can configure merge request status checks for each individual project. These are not shared between projects.

View File

@ -77,6 +77,9 @@ all projects that have visibility level set to [Internal](../../public_access.md
> - Ability to view revoked tokens [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/462217) in GitLab 17.3 [with a flag](../../../administration/feature_flags.md) named `retain_resource_access_token_user_after_revoke`. Disabled by default.
FLAG:
The ability to view revoked tokens is controlled by a feature flag. For more information, see the history.
To revoke a project access token:
1. On the left sidebar, select **Search or go to** and find your project.

View File

@ -1,14 +1,12 @@
/* eslint-disable import/no-default-export */
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { existsSync } from 'node:fs';
import localRules from 'eslint-plugin-local-rules';
import js from '@eslint/js';
import { FlatCompat } from '@eslint/eslintrc';
import * as graphqlEslint from '@graphql-eslint/eslint-plugin';
const filename = fileURLToPath(import.meta.url);
const dirname = path.dirname(filename);
const { dirname } = import.meta;
const compat = new FlatCompat({
baseDirectory: dirname,
recommendedConfig: js.configs.recommended,
@ -545,6 +543,7 @@ export default [
rules: {
'@gitlab/require-i18n-strings': 'off',
'import/extensions': 'off',
'import/no-extraneous-dependencies': 'off',
'import/no-commonjs': 'off',
'import/no-nodejs-modules': 'off',

View File

@ -3,52 +3,124 @@
module Ci
module JobToken
module Policies
# policies that every CI job token needs
FIXED = [
:build_create_container_image,
:build_destroy_container_image,
:build_download_code,
:build_push_code,
:build_read_container_image,
:read_project
].freeze
# policies that can be assigned to a CI job token
ALLOWED = [
:admin_container_image,
:admin_secure_files,
:admin_terraform_state,
:create_deployment,
:create_environment,
:create_on_demand_dast_scan,
:create_package,
:create_release,
:destroy_container_image,
:destroy_deployment,
:destroy_environment,
:destroy_package,
:destroy_release,
:read_build,
:read_container_image,
:read_deployment,
:read_environment,
:read_group,
:read_job_artifacts,
:read_package,
:read_pipeline,
:read_release,
:read_secure_files,
:read_terraform_state,
:stop_environment,
:update_deployment,
:update_environment,
:update_pipeline,
:update_release
POLICIES_BY_CATEGORY = [
{
value: :containers,
text: 'Containers',
description: 'Containers category',
policies: [
{
value: :read_containers,
type: :read,
text: 'Read',
description: 'Read container images in a project'
},
{
value: :admin_containers,
type: :admin,
text: 'Read and write',
description: 'Admin container images in a project'
}
]
},
{
value: :deployments,
text: 'Deployments',
description: 'Deployments category',
policies: [
{ value: :read_deployments, type: :read, text: 'Read', description: 'Read deployments in a project' },
{
value: :admin_deployments,
type: :admin,
text: 'Read and write',
description: 'Admin deployments in a project'
}
]
},
{
value: :environments,
text: 'Environments',
description: 'Environments category',
policies: [
{ value: :read_environments, type: :read, text: 'Read', description: 'Read environments in a project' },
{
value: :admin_environments,
type: :admin,
text: 'Read and write',
description: 'Admin + Stop environments in a project'
}
]
},
{
value: :jobs,
text: 'Jobs',
description: 'Jobs category',
policies: [
{ value: :read_jobs, type: :read, text: 'Read', description: 'Read job metadata and artifacts' },
{
value: :admin_jobs,
type: :admin,
text: 'Read and write',
description: 'Read job metadata, upload artifacts and update the pipeline status'
}
]
},
{
value: :packages,
text: 'Packages',
description: 'Packages category',
policies: [
{ value: :read_packages, type: :read, text: 'Read', description: 'Read packages' },
{ value: :admin_packages, type: :admin, text: 'Read and write', description: 'Admin packages' }
]
},
{
value: :releases,
text: 'Releases',
description: 'Releases category',
policies: [
{ value: :read_releases, type: :read, text: 'Read', description: 'Read releases in a project' },
{ value: :admin_releases, type: :admin, text: 'Read and write', description: 'Admin releases in a project' }
]
},
{
value: :secure_files,
text: 'Secure files',
description: 'Secure files category',
policies: [
{ value: :read_secure_files, type: :read, text: 'Read', description: 'Read secure files in a project' },
{
value: :admin_secure_files,
type: :admin,
text: 'Read and write',
description: 'Admin secure files in a project'
}
]
},
{
value: :terraform_state,
text: 'Terraform state',
description: 'Terraform state category',
policies: [
{
value: :read_terraform_state,
type: :read,
text: 'Read',
description: 'Read terraform state files/versions'
},
{
value: :admin_terraform_state,
type: :admin,
text: 'Read and write',
description: 'Admin terraform state files/versions'
}
]
}
].freeze
class << self
def all_values
(FIXED + ALLOWED).map(&:to_s)
def all_policies
POLICIES_BY_CATEGORY.flat_map { |category| category[:policies] }
end
end
end
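A minimal usage sketch for the catalog above, assuming a Rails console (the values mirror the `PoliciesEnum` spec in this commit):
```ruby
# Flatten the catalog to the list of assignable policies, as `all_policies` does.
Ci::JobToken::Policies.all_policies.map { |policy| policy[:value] }
# => [:read_containers, :admin_containers, :read_deployments, :admin_deployments, ...]

# Or walk the categories directly.
Ci::JobToken::Policies::POLICIES_BY_CATEGORY.each do |category|
  puts "#{category[:text]}: #{category[:policies].map { |policy| policy[:value] }.join(', ')}"
end
```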

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillGroupWikiRepositoryStatesGroupId < BackfillDesiredShardingKeyJob
operation_name :backfill_group_wiki_repository_states_group_id
feature_category :geo_replication
# override as parent table primary key is group_id
def backfill_via_table_primary_key
'group_id'
end
end
end
end

View File

@ -36782,9 +36782,6 @@ msgstr ""
msgid "Notes|Internal notes are only visible to members with the role of Planner or higher"
msgstr ""
msgid "Notes|Internal notes are only visible to members with the role of Reporter or higher"
msgstr ""
msgid "Notes|Last reply by %{name}"
msgstr ""

View File

@ -129,7 +129,7 @@ module QA
Page::Group::Settings::PackageRegistries.perform(&:set_allow_duplicates_disabled)
end
it 'prevents users from publishing duplicates',
it 'prevents users from publishing duplicates', :blocking,
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/377491' do
create_package(package_project)
package_project.visit_job('deploy')

View File

@ -1,9 +1,7 @@
#!/usr/bin/env node
import process from 'node:process';
/* eslint-disable import/extensions */
import { compileAllStyles } from './lib/compile_css.mjs';
/* eslint-enable import/extensions */
const fileWatcher = await compileAllStyles({ shouldWatch: process.argv?.includes('--watch') });

View File

@ -1,13 +1,12 @@
#!/usr/bin/env node
import { relative, dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { relative, resolve } from 'node:path';
import Runtime from 'jest-runtime';
import { readConfig } from 'jest-config';
import createJestConfig from '../../jest.config.base';
import createJestConfig from '../../jest.config.base.js';
const ROOT = resolve(dirname(fileURLToPath(import.meta.url)), '../../');
const ROOT = resolve(import.meta.dirname, '../../');
function resolveDependenciesRecursively(context, target, seen) {
const dependencies = context.hasteFS.getDependencies(target);

View File

@ -7,16 +7,13 @@ import postcssCustomProperties from 'postcss-custom-properties';
import postcssGlobalData from '@csstools/postcss-global-data';
import { compile, Logger } from 'sass';
import glob from 'glob';
/* eslint-disable import/extensions */
import tailwindcss from 'tailwindcss/lib/plugin.js';
import tailwindConfig from '../../../config/tailwind.config.js';
import IS_EE from '../../../config/helpers/is_ee_env.js';
import IS_JH from '../../../config/helpers/is_jh_env.js';
import { postCssColorToHex } from './postcss_color_to_hex.js';
/* eslint-enable import/extensions */
// Note, in node > 21.2 we could replace the below with import.meta.dirname
const ROOT_PATH = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../../');
const ROOT_PATH = path.resolve(import.meta.dirname, '../../../');
const OUTPUT_PATH = path.join(ROOT_PATH, 'app/assets/builds/');
const BASE_PATH = 'app/assets/stylesheets';

View File

@ -1,8 +1,7 @@
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import { join } from 'node:path';
import { readFile, rm } from 'node:fs/promises';
const ROOT_PATH = join(dirname(fileURLToPath(import.meta.url)), '..', '..');
const ROOT_PATH = join(import.meta.dirname, '..', '..');
const NODE_MODULES = join(ROOT_PATH, 'node_modules');
const INTEGRITY_FILE = join(NODE_MODULES, '.yarn-integrity');
const PACKAGE_JSON = join(ROOT_PATH, 'package.json');

View File

@ -1,10 +1,7 @@
/* eslint-disable import/extensions */
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import { createProcessor } from 'tailwindcss/lib/cli/build/plugin.js';
// Note, in node > 21.2 we could replace the below with import.meta.dirname
const ROOT_PATH = path.resolve(path.dirname(fileURLToPath(import.meta.url)), '../../');
const ROOT_PATH = path.resolve(import.meta.dirname, '../../');
export async function build({ shouldWatch = false, content = false } = {}) {
const processorOptions = {
@ -38,7 +35,7 @@ export async function build({ shouldWatch = false, content = false } = {}) {
}
function wasScriptCalledDirectly() {
return process.argv[1] === fileURLToPath(import.meta.url);
return process.argv[1] === import.meta.filename;
}
export function viteTailwindCompilerPlugin({ shouldWatch = true }) {

View File

@ -58,7 +58,7 @@ nodemon
console.log('The JavaScript assets are recompiled only if they change');
console.log('If you change them often, you might want to unset DEV_SERVER_STATIC');
}
/* eslint-disable import/extensions, promise/catch-or-return */
/* eslint-disable promise/catch-or-return */
import('./lib/compile_css.mjs').then(({ simplePluginForNodemon }) => {
plugin = simplePluginForNodemon({ shouldWatch: !STATIC_MODE });
return plugin?.start();
@ -67,7 +67,7 @@ nodemon
plugin = webpackTailwindCompilerPlugin({ shouldWatch: !STATIC_MODE });
return plugin?.start();
});
/* eslint-enable import/extensions, promise/catch-or-return */
/* eslint-enable promise/catch-or-return */
})
.on('quit', () => {
console.log('Shutting down CSS compilation process');

View File

@ -158,31 +158,31 @@ function debug_rspec_variables() {
function handle_retry_rspec_in_new_process() {
local rspec_run_status="${1}"
local rspec_retry_status=0
local auto_retry_status=1
local known_flaky_tests_exit_code=1
if [[ $rspec_run_status -eq 3 ]]; then
echoerr "Not retrying failing examples since we failed early on purpose!"
auto_retry_exit_code_if_known_flaky_tests || auto_retry_status=$?
exit "${auto_retry_status}"
change_exit_code_if_known_flaky_tests || known_flaky_tests_exit_code=$?
exit "${known_flaky_tests_exit_code}"
fi
if [[ $rspec_run_status -eq 2 ]]; then
echoerr "Not retrying failing examples since there were errors happening outside of the RSpec examples!"
auto_retry_exit_code_if_known_flaky_tests || auto_retry_status=$?
exit "${auto_retry_status}"
change_exit_code_if_known_flaky_tests || known_flaky_tests_exit_code=$?
exit "${known_flaky_tests_exit_code}"
fi
if [[ $rspec_run_status -eq 1 ]]; then
if is_rspec_last_run_results_file_missing; then
auto_retry_exit_code_if_known_flaky_tests || auto_retry_status=$?
exit "${auto_retry_status}"
change_exit_code_if_known_flaky_tests || known_flaky_tests_exit_code=$?
exit "${known_flaky_tests_exit_code}"
fi
local failed_examples_count=$(grep -c " failed" "${RSPEC_LAST_RUN_RESULTS_FILE}")
if [[ "${failed_examples_count}" -eq "${RSPEC_FAIL_FAST_THRESHOLD}" ]]; then
echoerr "Not retrying failing examples since we reached the maximum number of allowed test failures!"
auto_retry_exit_code_if_known_flaky_tests || auto_retry_status=$?
exit "${auto_retry_status}"
change_exit_code_if_known_flaky_tests || known_flaky_tests_exit_code=$?
exit "${known_flaky_tests_exit_code}"
fi
retry_failed_rspec_examples || rspec_retry_status=$?
@ -198,22 +198,17 @@ function handle_retry_rspec_in_new_process() {
# At this stage, we know the CI/CD job will fail.
#
# We'll change the exit code to auto-retry the CI job if the failure was due to a known flaky test.
auto_retry_exit_code_if_known_flaky_tests || auto_retry_status=$?
exit "${auto_retry_status}"
# We'll change the exit code of the CI job if the failure was due to a known flaky test.
change_exit_code_if_known_flaky_tests || known_flaky_tests_exit_code=$?
exit "${known_flaky_tests_exit_code}"
}
function auto_retry_exit_code_if_known_flaky_tests() {
function change_exit_code_if_known_flaky_tests() {
# Default exit status
rspec_retry_status=1
if [[ "${CI_AUTO_RETRY_JOBS_WITH_FLAKY_TESTS_ENABLED}" != "true" ]]; then
echoinfo "INFO: auto-retry of CI/CD job that failed due to a known flaky test is disabled because CI_AUTO_RETRY_JOBS_WITH_FLAKY_TESTS_ENABLED=${CI_AUTO_RETRY_JOBS_WITH_FLAKY_TESTS_ENABLED}."
return "${rspec_retry_status}"
fi
new_exit_code=1
echo "*******************************************************"
echo "Retry CI job if known flaky tests failed the job"
echo "Checking whether known flaky tests failed the job"
echo "*******************************************************"
found_known_flaky_tests_status=0
@ -222,34 +217,15 @@ function auto_retry_exit_code_if_known_flaky_tests() {
echo "${found_known_flaky_tests_output}"
if [[ $found_known_flaky_tests_status -eq 0 ]]; then
echo
echo "We'll ensure this CI/CD job is auto-retried (i.e. setting exit code: 112)."
echo "Changing the CI/CD job exit code to 112."
if [[ "${CI_AUTO_RETRY_JOBS_WITH_FLAKY_TESTS_NOTIFICATIONS_ENABLED}" == "true" ]]; then
comment=$(cat <<-EOF
Job ${CI_JOB_NAME} (${CI_JOB_URL}, ${CI_PIPELINE_URL}) failed because of a flaky test, and was auto-retried.
${found_known_flaky_tests_output}
EOF
)
echo
echo "Reporting to https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/573 (project id: 34408484)"
new_comment_in_issue \
"34408484" \
"573" \
"${comment}" || true
fi
# Exit code for auto-retrying a job that had known flaky tests in it
#
# See .gitlab/ci/global.gitlab-ci.yml for the list of custom exit codes we use to auto-retry jobs
rspec_retry_status=112
new_exit_code=112
else
echo
echo "Not changing the CI/CD job exit code."
fi
return "${rspec_retry_status}"
return "${new_exit_code}"
}
function found_known_flaky_tests() {
@ -265,18 +241,6 @@ function found_known_flaky_tests() {
--health-problem-type failures;
}
function new_comment_in_issue() {
local project_id="${1}"
local issue_id="${2}"
local body="${3}"
curl --silent -o /dev/null --request POST \
--header "PRIVATE-TOKEN: ${TEST_FAILURES_PROJECT_TOKEN}" \
--header "Content-Type: application/x-www-form-urlencoded" \
--data "body=${body}" \
"${CI_API_V4_URL}/projects/${project_id}/issues/${issue_id}/notes" || true
}
function rspec_parallelized_job() {
echo "[$(date '+%H:%M:%S')] Starting rspec_parallelized_job"

View File

@ -53,6 +53,7 @@ describe('issue_comment_form component', () => {
const findAddToReviewButton = () => findAddToReviewDropdown().find('button');
const findAddCommentNowButton = () => wrapper.findByTestId('add-comment-now-button');
const findConfidentialNoteCheckbox = () => wrapper.findByTestId('internal-note-checkbox');
const findInternalNoteTooltipIcon = () => wrapper.findByTestId('question-o-icon');
const findCommentTypeDropdown = () => wrapper.findByTestId('comment-button');
const findCommentButton = () => findCommentTypeDropdown().find('button');
const findErrorAlerts = () => wrapper.findAllComponents(GlAlert).wrappers;
@ -716,7 +717,7 @@ describe('issue_comment_form component', () => {
expect(findConfidentialNoteCheckbox().exists()).toBe(true);
});
it('should not render checkbox if user is not at least a reporter', () => {
it('should not render checkbox if user is not at least a planner', () => {
mountComponent({
mountFunction: mountExtended,
initialData: { note: 'confidential note' },
@ -727,6 +728,18 @@ describe('issue_comment_form component', () => {
expect(checkbox.exists()).toBe(false);
});
it('should have the tooltip explaining the internal note capabilities', () => {
mountComponent({
mountFunction: mountExtended,
initialData: { note: 'confidential note' },
noteableData: { ...notableDataMockCanUpdateIssuable },
});
const tooltip = findInternalNoteTooltipIcon();
expect(tooltip.exists()).toBe(true);
expect(tooltip.attributes('title')).toBe(COMMENT_FORM.internalVisibility);
});
it.each`
noteableType | rendered | message
${'Issue'} | ${true} | ${'render'}

View File

@ -36,7 +36,7 @@ RSpec.describe Mutations::Ci::JobTokenScope::AddGroupOrProject, feature_category
let_it_be(:target_project) { create(:project) }
let_it_be(:target_project_path) { target_project.full_path }
let(:policies) { %w[read_project read_package] }
let(:policies) { %w[read_containers read_packages] }
let(:mutation_args) do
{ project_path: project.full_path, target_path: target_project_path, job_token_policies: policies }
@ -96,7 +96,7 @@ RSpec.describe Mutations::Ci::JobTokenScope::AddGroupOrProject, feature_category
let_it_be(:target_group) { create(:group, :private) }
let_it_be(:target_group_path) { target_group.full_path }
let(:policies) { %w[read_project read_package] }
let(:policies) { %w[read_containers read_packages] }
let(:mutation_args) do
{ project_path: project.full_path, target_path: target_group_path, job_token_policies: policies }

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Types::Ci::JobTokenScope::JobTokenPolicyCategoryType, feature_category: :secrets_management do
specify { expect(described_class.graphql_name).to eq('JobTokenPolicyCategory') }
specify { expect(described_class).to have_graphql_fields(%i[text value description policies]) }
end

View File

@ -0,0 +1,8 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Types::Ci::JobTokenScope::JobTokenPolicyType, feature_category: :secrets_management do
specify { expect(described_class.graphql_name).to eq('JobTokenPolicy') }
specify { expect(described_class).to have_graphql_fields(%i[text value type description]) }
end

View File

@ -2,44 +2,25 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['CiJobTokenScopePolicies'], feature_category: :secrets_management do
RSpec.describe ::Types::Ci::JobTokenScope::PoliciesEnum, feature_category: :secrets_management do
it 'exposes all policies' do
expect(described_class.values.keys).to contain_exactly(*%w[
ADMIN_CONTAINER_IMAGE
ADMIN_SECURE_FILES
ADMIN_TERRAFORM_STATE
BUILD_CREATE_CONTAINER_IMAGE
BUILD_DESTROY_CONTAINER_IMAGE
BUILD_DOWNLOAD_CODE
BUILD_PUSH_CODE
BUILD_READ_CONTAINER_IMAGE
CREATE_DEPLOYMENT
CREATE_ENVIRONMENT
CREATE_ON_DEMAND_DAST_SCAN
CREATE_PACKAGE
CREATE_RELEASE
DESTROY_CONTAINER_IMAGE
DESTROY_DEPLOYMENT
DESTROY_ENVIRONMENT
DESTROY_PACKAGE
DESTROY_RELEASE
READ_BUILD
READ_CONTAINER_IMAGE
READ_DEPLOYMENT
READ_ENVIRONMENT
READ_GROUP
READ_JOB_ARTIFACTS
READ_PACKAGE
READ_PIPELINE
READ_PROJECT
READ_RELEASE
expect(described_class.values.keys).to match_array(%w[
READ_CONTAINERS
ADMIN_CONTAINERS
READ_DEPLOYMENTS
ADMIN_DEPLOYMENTS
READ_ENVIRONMENTS
ADMIN_ENVIRONMENTS
READ_JOBS
ADMIN_JOBS
READ_PACKAGES
ADMIN_PACKAGES
READ_RELEASES
ADMIN_RELEASES
READ_SECURE_FILES
ADMIN_SECURE_FILES
READ_TERRAFORM_STATE
STOP_ENVIRONMENT
UPDATE_DEPLOYMENT
UPDATE_ENVIRONMENT
UPDATE_PIPELINE
UPDATE_RELEASE
ADMIN_TERRAFORM_STATE
])
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Types::Ci::JobTokenScope::PolicyCategoriesEnum, feature_category: :secrets_management do
it 'exposes all categories' do
expect(described_class.values.keys).to match_array(%w[
CONTAINERS
DEPLOYMENTS
ENVIRONMENTS
JOBS
PACKAGES
RELEASES
SECURE_FILES
TERRAFORM_STATE
])
end
end

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Types::Ci::JobTokenScope::PolicyTypesEnum, feature_category: :secrets_management do
it 'exposes the correct enum members' do
expect(described_class.values).to match(
'READ' => have_attributes(value: :read, description: 'Read-only access to the resource.'),
'ADMIN' => have_attributes(value: :admin, description: 'Admin access to the resource.')
)
end
end

View File

@ -177,4 +177,48 @@ RSpec.describe GitlabSchema.types['Query'], feature_category: :shared do
is_expected.to have_graphql_resolver(Resolvers::FeatureFlagResolver)
end
end
describe 'jobTokenPoliciesByCategory field' do
subject { described_class.fields['jobTokenPoliciesByCategory'] }
it 'returns job token policies', :aggregate_failures do
is_expected.to have_graphql_type(::Types::Ci::JobTokenScope::JobTokenPolicyCategoryType)
query = <<~GRAPHQL
query {
jobTokenPoliciesByCategory {
text
value
description
policies {
text
value
description
type
}
}
}
GRAPHQL
expected_result = ::Ci::JobToken::Policies::POLICIES_BY_CATEGORY.map do |category|
{
'text' => category[:text],
'value' => category[:value].upcase,
'description' => category[:description],
'policies' => category[:policies].map do |policy|
{
'text' => policy[:text],
'value' => policy[:value].upcase,
'description' => policy[:description],
'type' => policy[:type].upcase
}
end
}
end
result = GitlabSchema.execute(query).as_json.dig('data', 'jobTokenPoliciesByCategory')
expect(result).to eq(expected_result.as_json)
end
end
end

View File

@ -903,6 +903,14 @@ RSpec.describe ProjectsHelper, feature_category: :source_code_management do
it { is_expected.to be_falsey }
end
context 'when lfs_misconfiguration_banner feature flag is disabled' do
before do
stub_feature_flags(lfs_misconfiguration_banner: false)
end
it { is_expected.to be_falsey }
end
end
context 'when it does have a .gitattributes file' do

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillGroupWikiRepositoryStatesGroupId,
feature_category: :geo_replication,
schema: 20241125133011 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :group_wiki_repository_states }
let(:backfill_column) { :group_id }
let(:backfill_via_table) { :group_wiki_repositories }
let(:backfill_via_column) { :group_id }
let(:backfill_via_foreign_key) { :group_wiki_repository_id }
end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillGroupWikiRepositoryStatesGroupId, feature_category: :geo_replication do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :group_wiki_repository_states,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_main_cell,
job_arguments: [
:group_id,
:group_wiki_repositories,
:group_id,
:group_wiki_repository_id
]
)
}
end
end
end

View File

@ -67,7 +67,7 @@ RSpec.describe Ci::JobToken::Allowlist, feature_category: :continuous_integratio
describe 'add!' do
let_it_be(:added_project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:policies) { %w[read_package read_project] }
let_it_be(:policies) { %w[read_containers read_packages] }
subject(:add_project) { allowlist.add!(added_project, policies: policies, user: user) }
@ -107,7 +107,7 @@ RSpec.describe Ci::JobToken::Allowlist, feature_category: :continuous_integratio
describe 'add_group!' do
let_it_be(:added_group) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:policies) { %w[read_package read_project] }
let_it_be(:policies) { %w[read_containers read_packages] }
subject(:add_group) { allowlist.add_group!(added_group, policies: policies, user: user) }

View File

@ -74,12 +74,12 @@ RSpec.describe Ci::JobToken::GroupScopeLink, feature_category: :continuous_integ
using RSpec::Parameterized::TableSyntax
where(:value, :valid) do
nil | true
[] | true
%w[read_build] | true
%w[read_build read_project] | true
%w[read_issue] | false
{ project: %w[read_build] } | false
nil | true
[] | true
%w[read_containers] | true
%w[read_containers read_packages] | true
%w[read_issue] | false
{ project: %w[read_build] } | false
end
with_them do

View File

@ -78,12 +78,12 @@ RSpec.describe Ci::JobToken::ProjectScopeLink, feature_category: :continuous_int
using RSpec::Parameterized::TableSyntax
where(:value, :valid) do
nil | true
[] | true
%w[read_build] | true
%w[read_build read_project] | true
%w[read_issue] | false
{ project: %w[read_build] } | false
nil | true
[] | true
%w[read_containers] | true
%w[read_containers read_packages] | true
%w[read_issue] | false
{ project: %w[read_build] } | false
end
with_them do

View File

@ -70,7 +70,7 @@ RSpec.describe 'Querying CI_JOB_TOKEN allowlist for a project', feature_category
{
'addedBy' => { 'username' => current_user.username },
'direction' => 'inbound',
'jobTokenPolicies' => ['READ_GROUP'],
'jobTokenPolicies' => ['READ_CONTAINERS'],
'sourceProject' => { 'fullPath' => project.full_path },
'target' => { 'fullPath' => target_group_1.full_path }
}
@ -85,14 +85,14 @@ RSpec.describe 'Querying CI_JOB_TOKEN allowlist for a project', feature_category
{
'addedBy' => { 'username' => current_user.username },
'direction' => 'outbound',
'jobTokenPolicies' => ['READ_PROJECT'],
'jobTokenPolicies' => ['READ_CONTAINERS'],
'sourceProject' => { 'fullPath' => project.full_path },
'target' => { 'fullPath' => target_project_2.full_path }
},
{
'addedBy' => { 'username' => current_user.username },
'direction' => 'inbound',
'jobTokenPolicies' => ['READ_PROJECT'],
'jobTokenPolicies' => ['READ_CONTAINERS'],
'sourceProject' => { 'fullPath' => project.full_path },
'target' => { 'fullPath' => target_project_1.full_path }
}
@ -138,7 +138,7 @@ RSpec.describe 'Querying CI_JOB_TOKEN allowlist for a project', feature_category
:ci_job_token_project_scope_link,
source_project: project,
target_project: target_project_1,
job_token_policies: %w[read_project],
job_token_policies: %w[read_containers],
added_by: current_user,
direction: :inbound
)
@ -147,7 +147,7 @@ RSpec.describe 'Querying CI_JOB_TOKEN allowlist for a project', feature_category
:ci_job_token_project_scope_link,
source_project: project,
target_project: target_project_2,
job_token_policies: %w[read_project],
job_token_policies: %w[read_containers],
added_by: current_user,
direction: :outbound
)
@ -157,7 +157,7 @@ RSpec.describe 'Querying CI_JOB_TOKEN allowlist for a project', feature_category
source_project: project,
target_group: target_group_1,
added_by: current_user,
job_token_policies: %w[read_group]
job_token_policies: %w[read_containers]
)
end

View File

@ -7,7 +7,7 @@ RSpec.describe 'CiJobTokenScopeAddGroupOrProject', feature_category: :continuous
let_it_be(:project) { create(:project, ci_inbound_job_token_scope_enabled: true) }
let(:policies) { %w[READ_PROJECT] }
let(:policies) { %w[READ_CONTAINERS] }
let(:mutation_response) { graphql_mutation_response(:ci_job_token_scope_add_group_or_project) }

View File

@ -45,7 +45,7 @@ RSpec.describe 'CiJobTokenScopeUpdatePolicies', feature_category: :continuous_in
let_it_be(:target_project) { create(:project, :private) }
let_it_be(:target_path) { target_project.full_path }
let(:policies) { %w[READ_PROJECT READ_PACKAGE] }
let(:policies) { %w[READ_CONTAINERS READ_PACKAGES] }
context 'when user does not have permissions to admin project' do
let_it_be(:current_user) { create(:user, guest_of: target_project) }
@ -74,7 +74,7 @@ RSpec.describe 'CiJobTokenScopeUpdatePolicies', feature_category: :continuous_in
:ci_job_token_project_scope_link,
source_project: project,
target_project: target_project,
job_token_policies: %w[read_project],
job_token_policies: %w[read_containers],
direction: :inbound
)
end
@ -123,7 +123,7 @@ RSpec.describe 'CiJobTokenScopeUpdatePolicies', feature_category: :continuous_in
let_it_be(:target_group) { create(:group, :private) }
let_it_be(:target_path) { target_group.full_path }
let(:policies) { %w[READ_GROUP READ_PACKAGE] }
let(:policies) { %w[READ_CONTAINERS READ_PACKAGES] }
context 'when user does not have permissions to admin project' do
let_it_be(:current_user) { create(:user, guest_of: target_group) }
@ -152,7 +152,7 @@ RSpec.describe 'CiJobTokenScopeUpdatePolicies', feature_category: :continuous_in
:ci_job_token_group_scope_link,
source_project: project,
target_group: target_group,
job_token_policies: %w[read_group]
job_token_policies: %w[read_containers]
)
end

View File

@ -51,6 +51,7 @@ RSpec.describe Ci::CancelPipelineService, :aggregate_failures, feature_category:
.to have_received(:info)
.with(
a_hash_including(
class: described_class.to_s,
event: 'pipeline_cancel_running',
pipeline_id: pipeline.id,
auto_canceled_by_pipeline_id: nil,

View File

@ -7,7 +7,7 @@ RSpec.describe Ci::JobTokenScope::AddGroupOrProjectService, feature_category: :c
let_it_be(:target_project) { create(:project) }
let_it_be(:target_group) { create(:group) }
let_it_be(:current_user) { create(:user) }
let_it_be(:policies) { %w[read_project read_package] }
let_it_be(:policies) { %w[read_containers read_packages] }
let(:response_success) { ServiceResponse.success }
@ -36,7 +36,7 @@ RSpec.describe Ci::JobTokenScope::AddGroupOrProjectService, feature_category: :c
context 'when project is a target to add' do
let(:target) { target_project }
let(:add_project_service_double) { instance_double(::Ci::JobTokenScope::AddProjectService) }
let(:policies) { %w[read_project] }
let(:policies) { %w[read_containers] }
before do
allow(::Ci::JobTokenScope::AddProjectService).to receive(:new)

View File

@ -6,7 +6,7 @@ RSpec.describe Ci::JobTokenScope::AddGroupService, feature_category: :continuous
let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let_it_be(:target_group) { create(:group, :private) }
let_it_be(:current_user) { create(:user) }
let_it_be(:policies) { %w[read_project read_package] }
let_it_be(:policies) { %w[read_containers read_packages] }
let(:service) { described_class.new(project, current_user) }

View File

@ -7,7 +7,7 @@ RSpec.describe Ci::JobTokenScope::AddProjectService, feature_category: :continuo
let_it_be(:project) { create(:project, ci_outbound_job_token_scope_enabled: true).tap(&:save!) }
let_it_be(:target_project) { create(:project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:policies) { %w[read_project read_package] }
let_it_be(:policies) { %w[read_containers read_packages] }
shared_examples 'adds project' do |context|
it 'adds the project to the scope', :aggregate_failures do

View File

@ -78,12 +78,12 @@ RSpec.describe Ci::JobTokenScope::UpdatePoliciesService, feature_category: :cont
:ci_job_token_project_scope_link,
source_project: project,
target_project: target_project,
job_token_policies: %w[read_project],
job_token_policies: %w[read_containers],
direction: :inbound
)
end
let(:policies) { %w[read_project read_package] }
let(:policies) { %w[read_containers read_packages] }
it_behaves_like 'when user is not logged in'
@ -106,7 +106,7 @@ RSpec.describe Ci::JobTokenScope::UpdatePoliciesService, feature_category: :cont
expect(project_link.source_project).to eq(project)
expect(project_link.target_project).to eq(target_project)
expect(project_link.job_token_policies).to eq(%w[read_project read_package])
expect(project_link.job_token_policies).to eq(%w[read_containers read_packages])
end
context 'when feature-flag `add_policies_to_ci_job_token` is disabled' do
@ -117,7 +117,7 @@ RSpec.describe Ci::JobTokenScope::UpdatePoliciesService, feature_category: :cont
it 'does not update the policies' do
project_link = Ci::JobToken::ProjectScopeLink.last
expect(project_link.job_token_policies).to eq(%w[read_project])
expect(project_link.job_token_policies).to eq(%w[read_containers])
end
end
end
@ -132,11 +132,11 @@ RSpec.describe Ci::JobTokenScope::UpdatePoliciesService, feature_category: :cont
:ci_job_token_group_scope_link,
source_project: project,
target_group: target_group,
job_token_policies: %w[read_group]
job_token_policies: %w[read_containers]
)
end
let(:policies) { %w[read_group read_package] }
let(:policies) { %w[read_containers read_packages] }
it_behaves_like 'when user is not logged in'
@ -159,7 +159,7 @@ RSpec.describe Ci::JobTokenScope::UpdatePoliciesService, feature_category: :cont
expect(group_link.source_project).to eq(project)
expect(group_link.target_group).to eq(target_group)
expect(group_link.job_token_policies).to eq(%w[read_group read_package])
expect(group_link.job_token_policies).to eq(%w[read_containers read_packages])
end
context 'when feature-flag `add_policies_to_ci_job_token` is disabled' do
@ -170,7 +170,7 @@ RSpec.describe Ci::JobTokenScope::UpdatePoliciesService, feature_category: :cont
it 'does not update the policies' do
group_link = Ci::JobToken::GroupScopeLink.last
expect(group_link.job_token_policies).to eq(%w[read_group])
expect(group_link.job_token_policies).to eq(%w[read_containers])
end
end
end

View File

@ -51,7 +51,8 @@ RSpec.shared_context 'with FOSS query type fields' do
:audit_event_definitions,
:abuse_report,
:abuse_report_labels,
:feature_flag_enabled
:feature_flag_enabled,
:job_token_policies_by_category
]
end
end

View File

@ -20,7 +20,6 @@ import {
PDF_JS_CMAPS_V4_PUBLIC_PATH,
} from './config/pdfjs.constants';
/* eslint-disable import/extensions */
import { viteTailwindCompilerPlugin } from './scripts/frontend/tailwindcss.mjs';
import { CopyPlugin } from './config/helpers/vite_plugin_copy.mjs';
import { AutoStopPlugin } from './config/helpers/vite_plugin_auto_stop.mjs';
@ -29,7 +28,6 @@ import { FixedRubyPlugin } from './config/helpers/vite_plugin_ruby_fixed.mjs';
import { StylePlugin } from './config/helpers/vite_plugin_style.mjs';
import { IconsPlugin } from './config/helpers/vite_plugin_icons.mjs';
import { ImagesPlugin } from './config/helpers/vite_plugin_images.mjs';
/* eslint-enable import/extensions */
let viteGDKConfig;
try {