Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-01-14 18:35:58 +00:00
parent 98fdc0b091
commit ec28417ade
73 changed files with 1385 additions and 681 deletions

View File

@ -6,7 +6,6 @@ RSpec/AvoidConditionalStatements:
- 'ee/spec/features/billings/billing_plans_spec.rb'
- 'ee/spec/features/boards/scoped_issue_board_spec.rb'
- 'ee/spec/features/boards/user_visits_board_spec.rb'
- 'ee/spec/features/ci_shared_runner_warnings_spec.rb'
- 'ee/spec/features/epic_boards/epic_boards_spec.rb'
- 'ee/spec/features/epics/gfm_autocomplete_spec.rb'
- 'ee/spec/features/group_protected_branches_spec.rb'

View File

@ -5,7 +5,6 @@ RSpec/NoExpectationExample:
- 'ee/spec/features/billings/billing_plans_spec.rb'
- 'ee/spec/features/boards/swimlanes/epics_swimlanes_drag_drop_spec.rb'
- 'ee/spec/features/boards/swimlanes/epics_swimlanes_filtering_spec.rb'
- 'ee/spec/features/ci_shared_runner_warnings_spec.rb'
- 'ee/spec/features/epic_boards/epic_boards_sidebar_spec.rb'
- 'ee/spec/features/groups/add_ons/discover_duo_pro_hand_raise_lead_spec.rb'
- 'ee/spec/features/groups/discovers/hand_raise_lead_spec.rb'

View File

@ -41,6 +41,7 @@ import {
I18N_BULK_DELETE_PARTIAL_ERROR,
I18N_BULK_DELETE_CONFIRMATION_TOAST,
I18N_BULK_DELETE_MAX_SELECTED,
I18N_CHECKBOX,
} from '../constants';
import JobCheckbox from './job_checkbox.vue';
import ArtifactsBulkDelete from './artifacts_bulk_delete.vue';
@ -149,7 +150,7 @@ export default {
return [
{
key: 'checkbox',
label: '',
label: I18N_CHECKBOX,
thClass: 'gl-w-1/20',
},
...this.$options.fields,

View File

@ -5,6 +5,7 @@ export const TOTAL_ARTIFACTS_SIZE = s__('Artifacts|Total artifacts size');
export const SIZE_UNKNOWN = __('Unknown');
export const I18N_DOWNLOAD = __('Download');
export const I18N_CHECKBOX = __('Select artifacts');
export const I18N_BROWSE = s__('Artifacts|Browse');
export const I18N_DELETE = __('Delete');
export const I18N_EXPIRED = __('Expired');

View File

@ -1,5 +1,5 @@
<script>
import { GlButton, GlButtonGroup, GlLink, GlPopover } from '@gitlab/ui';
import { GlBadge, GlButton, GlButtonGroup, GlLink, GlPopover } from '@gitlab/ui';
import { s__ } from '~/locale';
import { helpPagePath } from '~/helpers/help_page_helper';
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
@ -23,6 +23,7 @@ export default {
},
artifactsHelpPath: helpPagePath('ci/jobs/job_artifacts'),
components: {
GlBadge,
GlButton,
GlButtonGroup,
GlLink,
@ -40,6 +41,10 @@ export default {
type: String,
required: true,
},
reports: {
type: Array,
required: true,
},
},
computed: {
isExpired() {
@ -52,24 +57,35 @@ export default {
willExpire() {
return this.artifact?.expired === false && !this.isLocked;
},
hasReports() {
return this.reports.length > 0;
},
},
};
</script>
<template>
<div>
<div class="title gl-font-bold">
<span class="gl-mr-2">{{ $options.i18n.jobArtifacts }}</span>
<gl-link :href="$options.artifactsHelpPath" data-testid="artifacts-help-link">
<help-icon id="artifacts-help" />
</gl-link>
<gl-popover
target="artifacts-help"
:title="$options.i18n.jobArtifacts"
triggers="hover focus"
>
{{ $options.i18n.artifactsHelpText }}
</gl-popover>
<div class="gl-flex gl-items-center">
<div class="title gl-font-bold">
<span class="gl-mr-2">{{ $options.i18n.jobArtifacts }}</span>
<gl-link :href="$options.artifactsHelpPath" data-testid="artifacts-help-link">
<help-icon id="artifacts-help" />
</gl-link>
<gl-popover
target="artifacts-help"
:title="$options.i18n.jobArtifacts"
triggers="hover focus"
>
{{ $options.i18n.artifactsHelpText }}
</gl-popover>
</div>
<span v-if="hasReports" class="gl-ml-2">
<gl-badge v-for="(report, index) in reports" :key="index" class="gl-mr-2">
{{ report.file_type }}
</gl-badge>
</span>
</div>
<p
v-if="isExpired || willExpire"
class="build-detail-row"

View File

@ -68,6 +68,9 @@ export default {
externalLinks() {
return filterAnnotations(this.job.annotations, 'external_link');
},
reports() {
return this.job?.reports || [];
},
},
watch: {
job(value, oldValue) {
@ -101,6 +104,7 @@ export default {
v-if="hasArtifact"
class="gl-border-b gl-border-subtle gl-py-4"
:artifact="artifact"
:reports="reports"
:help-url="artifactHelpUrl"
/>

View File

@ -87,7 +87,9 @@ export default {
</template>
<template v-if="!hasMergeRequests && !loading" #empty>
{{ __('No merge requests match this list.') }}
<p class="gl-pt-1 gl-text-center gl-text-subtle">
{{ __('No merge requests match this list.') }}
</p>
</template>
<template #default>

View File

@ -107,6 +107,7 @@ export default {
placement="bottom-end"
no-caret
data-testid="wiki-more-dropdown"
class="print:gl-hidden"
@shown="showDropdown"
@hidden="hideDropdown"
>

View File

@ -111,7 +111,7 @@ export default {
icon="ellipsis_v"
category="tertiary"
placement="bottom-end"
class="note-action-button more-actions-toggle"
class="note-action-button more-actions-toggle print:gl-hidden"
no-caret
>
<gl-disclosure-dropdown-item

View File

@ -34,7 +34,7 @@ export function expandSection(sectionArg) {
}
InternalEvents.trackEvent('click_expand_panel_on_settings', {
label: $section.find('.settings-title').text(),
label: $section.find('[data-event-tracking="settings-block-title"]').text(),
});
}

View File

@ -88,7 +88,7 @@ export default {
data-testid="settings-block-title"
@click="toggleExpanded"
>
<h2 class="gl-heading-2 !gl-mb-2">
<h2 class="gl-heading-2 !gl-mb-2" data-event-tracking="settings-block-title">
{{ title }}
</h2>
</button>

View File

@ -140,11 +140,6 @@ ul.wiki-pages-list.content-list {
.container-limited.limit-container-width {
max-width: 100%;
}
// stylelint-disable-next-line gitlab/no-gl-class
.gl-disclosure-dropdown {
display: none !important;
}
}
.edited-text {

View File

@ -6,7 +6,7 @@
= render Pajamas::ButtonComponent.new(category: :tertiary, size: :small, icon: 'chevron-lg-right', icon_classes: '!-gl-mx-2', button_text_classes: 'gl-sr-only', button_options: @button_options.merge(class: 'settings-toggle js-settings-toggle', 'aria-label': aria_label)) do
= button_text
.gl-grow
%h2{ class: title_classes }
%h2{ class: title_classes, data: { 'event-tracking': 'settings-block-title' } }
= heading || @heading
- if description || @description
%p.gl-text-subtle.gl-m-0

View File

@ -10,6 +10,10 @@ module Types
field :link_created_at, Types::TimeType,
description: 'Timestamp the link was created.', null: false,
method: :issue_link_created_at
field :work_item_state, Types::WorkItemStateEnum,
description: 'State of the linked work item.', null: false, method: :state
field :link_id, ::Types::GlobalIDType[::WorkItems::RelatedWorkItemLink],
description: 'Global ID of the link.', null: false,
method: :issue_link_id

View File

@ -2,13 +2,15 @@
module Ci
module JobToken
class Allowlist
include ::Gitlab::Utils::StrongMemoize
def initialize(source_project, direction: :inbound)
@source_project = source_project
@direction = direction
end
def includes_project?(target_project)
source_links
project_links
.with_target(target_project)
.exists?
end
@ -18,7 +20,7 @@ module Ci
end
def nearest_scope_for_target_project(target_project)
source_links.with_target(target_project).first ||
project_links.with_target(target_project).first ||
group_links_for_target(target_project).first
end
@ -57,13 +59,7 @@ module Ci
)
end
private
def add_policies_to_ci_job_token_enabled
Feature.enabled?(:add_policies_to_ci_job_token, @source_project)
end
def source_links
def project_links
Ci::JobToken::ProjectScopeLink
.with_source(@source_project)
.where(direction: @direction)
@ -74,6 +70,49 @@ module Ci
.with_source(@source_project)
end
def bulk_add_projects!(target_projects, user:, autopopulated: false, policies: [])
now = Time.zone.now
job_token_policies = add_policies_to_ci_job_token_enabled ? policies : []
projects = target_projects.map do |target_project|
Ci::JobToken::ProjectScopeLink.new(
source_project_id: @source_project.id,
target_project: target_project,
autopopulated: autopopulated,
added_by: user,
job_token_policies: job_token_policies,
created_at: now
)
end
Ci::JobToken::ProjectScopeLink.bulk_insert!(projects)
end
def bulk_add_groups!(target_groups, user:, autopopulated: false, policies: [])
now = Time.zone.now
job_token_policies = add_policies_to_ci_job_token_enabled ? policies : []
groups = target_groups.map do |target_group|
Ci::JobToken::GroupScopeLink.new(
source_project_id: @source_project.id,
target_group: target_group,
autopopulated: autopopulated,
added_by: user,
job_token_policies: job_token_policies,
created_at: now
)
end
Ci::JobToken::GroupScopeLink.bulk_insert!(groups)
end
private
def add_policies_to_ci_job_token_enabled
Feature.enabled?(:add_policies_to_ci_job_token, @source_project)
end
strong_memoize_attr :add_policies_to_ci_job_token_enabled
def group_links_for_target(target_project)
target_group_ids = target_project.parent_groups.pluck(:id)
group_links.where(target_group_id: target_group_ids).order(
@ -84,7 +123,7 @@ module Ci
end
def target_project_ids
source_links
project_links
# pluck needed to avoid ci and main db join
.pluck(:target_project_id)
end

View File

@ -6,6 +6,8 @@
module Ci
module JobToken
class GroupScopeLink < Ci::ApplicationRecord
include BulkInsertSafe
self.table_name = 'ci_job_token_group_scope_links'
GROUP_LINK_LIMIT = 200
@ -19,6 +21,7 @@ module Ci
scope :with_source, ->(project) { where(source_project: project) }
scope :with_target, ->(group) { where(target_group: group) }
scope :autopopulated, -> { where(autopopulated: true) }
validates :source_project, presence: true
validates :target_group, presence: true

View File

@ -6,6 +6,8 @@
module Ci
module JobToken
class ProjectScopeLink < Ci::ApplicationRecord
include BulkInsertSafe
self.table_name = 'ci_job_token_project_scope_links'
PROJECT_LINK_DIRECTIONAL_LIMIT = 200
@ -20,6 +22,7 @@ module Ci
scope :with_access_direction, ->(direction) { where(direction: direction) }
scope :with_source, ->(project) { where(source_project: project) }
scope :with_target, ->(project) { where(target_project: project) }
scope :autopopulated, -> { where(autopopulated: true) }
validates :source_project, presence: true
validates :target_project, presence: true

View File

@ -0,0 +1,182 @@
# frozen_string_literal: true

module Integrations
  module Base
    # Shared implementation of the Atlassian Bamboo CI integration.
    # Mixed into both the project-level (Integrations::Bamboo) and the
    # instance-level (Integrations::Instance::Bamboo) integration classes
    # via ActiveSupport::Concern.
    module Bamboo
      extend ActiveSupport::Concern

      class_methods do
        # Human-readable integration name shown in the UI.
        def title
          s_('BambooService|Atlassian Bamboo')
        end

        # Short description shown in the integrations list.
        def description
          s_('BambooService|Run CI/CD pipelines with Atlassian Bamboo.')
        end

        # Help text linking to the Bamboo integration documentation page.
        def help
          build_help_page_url(
            'user/project/integrations/bamboo.md',
            s_('BambooService|Run CI/CD pipelines with Atlassian Bamboo. You must set up automatic revision ' \
              'labeling and a repository trigger in Bamboo.')
          )
        end

        # URL/route parameter slug identifying this integration.
        def to_param
          'bamboo'
        end
      end

      included do
        include Base::Ci
        include ReactivelyCached
        prepend EnableSslVerification

        # Root URL of the Bamboo server (for example, https://bamboo.example.com).
        field :bamboo_url,
          title: -> { s_('BambooService|Bamboo URL') },
          placeholder: -> { s_('https://bamboo.example.com') },
          help: -> { s_('BambooService|Bamboo root URL.') },
          description: -> { s_('Bamboo root URL (for example, `https://bamboo.example.com`).') },
          exposes_secrets: true,
          required: true

        # Bamboo build plan key; stored as a secret field.
        field :build_key,
          help: -> { s_('BambooService|Bamboo build plan key.') },
          description: -> { s_('Bamboo build plan key (for example, `KEY`).') },
          non_empty_password_title: -> { s_('BambooService|Enter new build key') },
          non_empty_password_help: -> { s_('BambooService|Leave blank to use your current build key.') },
          placeholder: -> { _('KEY') },
          required: true,
          is_secret: true

        # Credentials for a Bamboo user with API access.
        field :username,
          help: -> { s_('BambooService|User with API access to the Bamboo server.') },
          description: -> { s_('User with API access to the Bamboo server.') },
          required: true

        field :password,
          type: :password,
          non_empty_password_title: -> { s_('ProjectService|Enter new password') },
          non_empty_password_help: -> { s_('ProjectService|Leave blank to use your current password') },
          description: -> { s_('Password of the user.') },
          required: true

        with_options if: :activated? do
          validates :bamboo_url, presence: true, public_url: true
          validates :build_key, presence: true
        end

        # Username and password are each required only when the other one is set,
        # so anonymous access (both blank) remains valid.
        validates :username, presence: true, if: ->(integration) { integration.activated? && integration.password }
        validates :password, presence: true, if: ->(integration) { integration.activated? && integration.username }

        attr_accessor :response

        # ReactivelyCached entry point: fetches the Bamboo result for the
        # given commit SHA and caches the derived build page URL and
        # commit status together.
        def calculate_reactive_cache(sha, _ref)
          response = try_get_path("rest/api/latest/result/byChangeset/#{sha}")

          { build_page: read_build_page(response), commit_status: read_commit_status(response) }
        end

        # Cached URL of the Bamboo build page for the given SHA/ref.
        def build_page(sha, ref)
          with_reactive_cache(sha, ref) { |cached| cached[:build_page] }
        end

        # Cached commit status: 'success'/'failed'/'pending', or :error.
        def commit_status(sha, ref)
          with_reactive_cache(sha, ref) { |cached| cached[:commit_status] }
        end
      end

      # Triggers a Bamboo build for supported event payloads; no-op otherwise.
      def execute(data)
        return unless supported_events.include?(data[:object_kind])

        get_path("updateAndBuild.action", { buildKey: build_key })
      end

      # Path to the Bamboo logo used in the integrations UI.
      def avatar_url
        ActionController::Base.helpers.image_path(
          'illustrations/third-party-logos/integrations-logos/atlassian-bamboo.svg'
        )
      end

      private

      # Extracts a single build result hash from the API response;
      # returns nil unless the response was HTTP 200.
      def get_build_result(response)
        return if response&.code != 200

        # May be nil if no result, a single result hash, or an array if multiple results for a given changeset.
        result = response.dig('results', 'results', 'result')

        # In case of multiple results, arbitrarily assume the last one is the most relevant.
        return result.last if result.is_a?(Array)

        result
      end

      # Builds the URL the user should be sent to for this build.
      def read_build_page(response)
        result = get_build_result(response)
        key =
          if result.blank?
            # If actual build link can't be determined, send user to build summary page.
            build_key
          else
            # If actual build link is available, go to build result page.
            result.dig('planResultKey', 'key')
          end

        build_url("browse/#{key}")
      end

      # Maps Bamboo's buildState onto a GitLab status string. HTTP codes
      # other than 200/404 yield :error; a 404 (or missing result) is
      # reported as 'pending'.
      def read_commit_status(response)
        return :error unless response && (response.code == 200 || response.code == 404)

        result = get_build_result(response)
        status =
          if result.blank?
            'Pending'
          else
            result['buildState']
          end

        return :error unless status.present?

        if status.include?('Success')
          'success'
        elsif status.include?('Failed')
          'failed'
        elsif status.include?('Pending')
          'pending'
        else
          :error
        end
      end

      # Best-effort GET via Gitlab::HTTP.try_get, with the project id
      # attached as extra log info.
      def try_get_path(path, query_params = {})
        params = build_get_params(query_params)
        params[:extra_log_info] = { project_id: project_id }

        Gitlab::HTTP.try_get(build_url(path), params)
      end

      def get_path(path, query_params = {})
        Gitlab::HTTP.get(build_url(path), build_get_params(query_params))
      end

      def build_url(path)
        Gitlab::Utils.append_path(bamboo_url, path)
      end

      # Assembles request options; adds basic-auth (and the os_authType
      # query parameter) only when credentials are configured.
      def build_get_params(query_params)
        params = { verify: enable_ssl_verification, query: query_params }
        return params if username.blank? && password.blank?

        query_params[:os_authType] = 'basic'
        params[:basic_auth] = basic_auth
        params
      end

      def basic_auth
        { username: username, password: password }
      end
    end
  end
end

View File

@ -2,173 +2,6 @@
module Integrations
class Bamboo < Integration
include Base::Ci
include ReactivelyCached
prepend EnableSslVerification
field :bamboo_url,
title: -> { s_('BambooService|Bamboo URL') },
placeholder: -> { s_('https://bamboo.example.com') },
help: -> { s_('BambooService|Bamboo root URL.') },
description: -> { s_('Bamboo root URL (for example, `https://bamboo.example.com`).') },
exposes_secrets: true,
required: true
field :build_key,
help: -> { s_('BambooService|Bamboo build plan key.') },
description: -> { s_('Bamboo build plan key (for example, `KEY`).') },
non_empty_password_title: -> { s_('BambooService|Enter new build key') },
non_empty_password_help: -> { s_('BambooService|Leave blank to use your current build key.') },
placeholder: -> { _('KEY') },
required: true,
is_secret: true
field :username,
help: -> { s_('BambooService|User with API access to the Bamboo server.') },
description: -> { s_('User with API access to the Bamboo server.') },
required: true
field :password,
type: :password,
non_empty_password_title: -> { s_('ProjectService|Enter new password') },
non_empty_password_help: -> { s_('ProjectService|Leave blank to use your current password') },
description: -> { s_('Password of the user.') },
required: true
with_options if: :activated? do
validates :bamboo_url, presence: true, public_url: true
validates :build_key, presence: true
end
validates :username, presence: true, if: ->(integration) { integration.activated? && integration.password }
validates :password, presence: true, if: ->(integration) { integration.activated? && integration.username }
attr_accessor :response
def self.title
s_('BambooService|Atlassian Bamboo')
end
def self.description
s_('BambooService|Run CI/CD pipelines with Atlassian Bamboo.')
end
def self.help
build_help_page_url(
'user/project/integrations/bamboo.md',
s_('BambooService|Run CI/CD pipelines with Atlassian Bamboo. You must set up automatic revision ' \
'labeling and a repository trigger in Bamboo.')
)
end
def self.to_param
'bamboo'
end
def build_page(sha, ref)
with_reactive_cache(sha, ref) { |cached| cached[:build_page] }
end
def commit_status(sha, ref)
with_reactive_cache(sha, ref) { |cached| cached[:commit_status] }
end
def execute(data)
return unless supported_events.include?(data[:object_kind])
get_path("updateAndBuild.action", { buildKey: build_key })
end
def calculate_reactive_cache(sha, _ref)
response = try_get_path("rest/api/latest/result/byChangeset/#{sha}")
{ build_page: read_build_page(response), commit_status: read_commit_status(response) }
end
def avatar_url
ActionController::Base.helpers.image_path(
'illustrations/third-party-logos/integrations-logos/atlassian-bamboo.svg'
)
end
private
def get_build_result(response)
return if response&.code != 200
# May be nil if no result, a single result hash, or an array if multiple results for a given changeset.
result = response.dig('results', 'results', 'result')
# In case of multiple results, arbitrarily assume the last one is the most relevant.
return result.last if result.is_a?(Array)
result
end
def read_build_page(response)
result = get_build_result(response)
key =
if result.blank?
# If actual build link can't be determined, send user to build summary page.
build_key
else
# If actual build link is available, go to build result page.
result.dig('planResultKey', 'key')
end
build_url("browse/#{key}")
end
def read_commit_status(response)
return :error unless response && (response.code == 200 || response.code == 404)
result = get_build_result(response)
status =
if result.blank?
'Pending'
else
result['buildState']
end
return :error unless status.present?
if status.include?('Success')
'success'
elsif status.include?('Failed')
'failed'
elsif status.include?('Pending')
'pending'
else
:error
end
end
def try_get_path(path, query_params = {})
params = build_get_params(query_params)
params[:extra_log_info] = { project_id: project_id }
Gitlab::HTTP.try_get(build_url(path), params)
end
def get_path(path, query_params = {})
Gitlab::HTTP.get(build_url(path), build_get_params(query_params))
end
def build_url(path)
Gitlab::Utils.append_path(bamboo_url, path)
end
def build_get_params(query_params)
params = { verify: enable_ssl_verification, query: query_params }
return params if username.blank? && password.blank?
query_params[:os_authType] = 'basic'
params[:basic_auth] = basic_auth
params
end
def basic_auth
{ username: username, password: password }
end
include Integrations::Base::Bamboo
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true

module Integrations
  module Instance
    # Instance-level Atlassian Bamboo integration.
    # All behavior is provided by the shared Integrations::Base::Bamboo concern.
    class Bamboo < Integration
      include Integrations::Base::Bamboo
    end
  end
end

View File

@ -0,0 +1,43 @@
# frozen_string_literal: true

module Ci
  module JobToken
    # Seeds a project's CI/CD job token allowlist from its compacted
    # authorizations, inserting at most COMPACTION_LIMIT entries.
    class AutopopulateAllowlistService
      include ::Gitlab::Loggable
      include ::Gitlab::Utils::StrongMemoize

      COMPACTION_LIMIT = Ci::JobToken::ProjectScopeLink::PROJECT_LINK_DIRECTIONAL_LIMIT

      def initialize(project, user)
        @project = project
        @user = user
      end

      # Bulk-adds the compacted group and project targets to the allowlist
      # as autopopulated entries, inside a single transaction.
      #
      # Raises Gitlab::Access::AccessDeniedError unless the user can
      # administer the project.
      def execute
        raise Gitlab::Access::AccessDeniedError unless authorized?

        group_targets = compactor.allowlist_groups
        project_targets = compactor.allowlist_projects
        allowlist = Ci::JobToken::Allowlist.new(@project)

        ApplicationRecord.transaction do
          allowlist.bulk_add_groups!(group_targets, user: @user, autopopulated: true)
          allowlist.bulk_add_projects!(project_targets, user: @user, autopopulated: true)
        end
      end

      private

      # Compactor instance, built and run once (memoized) against the limit.
      def compactor
        Ci::JobToken::AuthorizationsCompactor.new(@project.id).tap do |instance|
          instance.compact(COMPACTION_LIMIT)
        end
      end
      strong_memoize_attr :compactor

      def authorized?
        @user.can?(:admin_project, @project)
      end
    end
  end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true

module Ci
  module JobToken
    # Removes every autopopulated entry from a project's CI/CD job token
    # allowlist (both project links and group links).
    class ClearAutopopulatedAllowlistService
      def initialize(project, user)
        @project = project
        @user = user
      end

      # Deletes the autopopulated project and group links in a single
      # transaction. Raises Gitlab::Access::AccessDeniedError unless the
      # user can administer the project.
      def execute
        raise Gitlab::Access::AccessDeniedError unless authorized?

        Ci::JobToken::Allowlist.new(@project).then do |allowlist|
          ApplicationRecord.transaction do
            allowlist.project_links.autopopulated.delete_all
            allowlist.group_links.autopopulated.delete_all
          end
        end
      end

      private

      def authorized?
        @user.can?(:admin_project, @project)
      end
    end
  end
end

View File

@ -114,6 +114,9 @@ end
Settings.omniauth['providers'] ||= []
Settings['oidc_provider'] ||= {}
Settings.oidc_provider['openid_id_token_expire_in_seconds'] = 120 if Settings.oidc_provider['openid_id_token_expire_in_seconds'].nil?
# Handle backward compatibility with the renamed kerberos_spnego provider
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/96335#note_1094265436
Gitlab.ee do

View File

@ -23,6 +23,8 @@ Doorkeeper::OpenidConnect.configure do
user.id
end
expiration Gitlab.config.oidc_provider.openid_id_token_expire_in_seconds
claims do
with_options scope: :openid do |o|
o.claim(:sub_legacy, response: [:id_token, :user_info]) do |user|
@ -55,7 +57,9 @@ Doorkeeper::OpenidConnect.configure do
end
end
o.claim(:website, response: [:id_token, :user_info]) { |user| user.full_website_url if user.website_url.present? }
o.claim(:website, response: [:id_token, :user_info]) do |user|
user.full_website_url if user.website_url.present?
end
o.claim(:profile, response: [:id_token, :user_info]) { |user| Gitlab::Routing.url_helpers.user_url user }
o.claim(:picture, response: [:id_token, :user_info]) { |user| user.avatar_url(only_path: false) }
o.claim(:groups) do |user|

View File

@ -783,8 +783,6 @@
- 1
- - search_zoekt_default_branch_changed
- 1
- - search_zoekt_delete_project
- 1
- - search_zoekt_delete_project_event
- 1
- - search_zoekt_index_marked_as_to_delete_event

View File

@ -8,14 +8,6 @@ description: Persists metadata between users and alerts to support alert assignm
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/32609
milestone: '13.1'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: alert_id
table: alert_management_alerts
sharding_key: project_id
belongs_to: alert
desired_sharding_key_migration_job_name: BackfillAlertManagementAlertAssigneesProjectId
table_size: small
sharding_key:
project_id: projects

View File

@ -8,14 +8,6 @@ description: Information relating epic boards to labels used to scope the boards
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/48658
milestone: '13.7'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
group_id:
references: namespaces
backfill_via:
parent:
foreign_key: epic_board_id
table: boards_epic_boards
sharding_key: group_id
belongs_to: epic_board
desired_sharding_key_migration_job_name: BackfillBoardsEpicBoardLabelsGroupId
table_size: small
sharding_key:
group_id: namespaces

View File

@ -8,14 +8,6 @@ description: Secret variables used in DAST on-demand scans
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56067
milestone: '13.11'
gitlab_schema: gitlab_sec
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: dast_site_profile_id
table: dast_site_profiles
sharding_key: project_id
belongs_to: dast_site_profile
desired_sharding_key_migration_job_name: BackfillDastSiteProfileSecretVariablesProjectId
table_size: small
sharding_key:
project_id: projects

View File

@ -4,6 +4,7 @@ classes:
- Integrations::Instance::Integration
- Integrations::Instance::Asana
- Integrations::Instance::Assembla
- Integrations::Instance::Bamboo
feature_categories:
- integrations
description: Support 3rd party instance-wide integrations

View File

@ -8,14 +8,6 @@ description: Debian project-level distribution keys
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60993
milestone: '14.0'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: distribution_id
table: packages_debian_project_distributions
sharding_key: project_id
belongs_to: distribution
desired_sharding_key_migration_job_name: BackfillPackagesDebianProjectDistributionKeysProjectId
table_size: small
sharding_key:
project_id: projects

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

# Enforces presence of the project_id sharding key on
# alert_management_alert_assignees.
# NOTE(review): assumes the project_id backfill
# (BackfillAlertManagementAlertAssigneesProjectId) has completed — confirm.
class AddAlertManagementAlertAssigneesProjectIdNotNullConstraint < Gitlab::Database::Migration[2.2]
  # The constraint helper performs its own multi-step DDL, so it cannot
  # run inside a single DDL transaction.
  disable_ddl_transaction!
  milestone '17.9'

  def up
    add_not_null_constraint :alert_management_alert_assignees, :project_id
  end

  def down
    remove_not_null_constraint :alert_management_alert_assignees, :project_id
  end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

# Enforces presence of the group_id sharding key on boards_epic_board_labels.
# NOTE(review): assumes the group_id backfill
# (BackfillBoardsEpicBoardLabelsGroupId) has completed — confirm.
class AddBoardsEpicBoardLabelsGroupIdNotNullConstraint < Gitlab::Database::Migration[2.2]
  # The constraint helper performs its own multi-step DDL, so it cannot
  # run inside a single DDL transaction.
  disable_ddl_transaction!
  milestone '17.9'

  def up
    add_not_null_constraint :boards_epic_board_labels, :group_id
  end

  def down
    remove_not_null_constraint :boards_epic_board_labels, :group_id
  end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

# Enforces presence of the project_id sharding key on
# dast_site_profile_secret_variables.
# NOTE(review): assumes the project_id backfill
# (BackfillDastSiteProfileSecretVariablesProjectId) has completed — confirm.
class AddDastSiteProfileSecretVariablesProjectIdNotNullConstraint < Gitlab::Database::Migration[2.2]
  # The constraint helper performs its own multi-step DDL, so it cannot
  # run inside a single DDL transaction.
  disable_ddl_transaction!
  milestone '17.9'

  def up
    add_not_null_constraint :dast_site_profile_secret_variables, :project_id
  end

  def down
    remove_not_null_constraint :dast_site_profile_secret_variables, :project_id
  end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

# Enforces presence of the project_id sharding key on
# packages_debian_project_distribution_keys.
# NOTE(review): assumes the project_id backfill
# (BackfillPackagesDebianProjectDistributionKeysProjectId) has completed — confirm.
class AddPackagesDebianProjectDistributionKeysProjectIdNotNullConstraint < Gitlab::Database::Migration[2.2]
  # The constraint helper performs its own multi-step DDL, so it cannot
  # run inside a single DDL transaction.
  disable_ddl_transaction!
  milestone '17.9'

  def up
    add_not_null_constraint :packages_debian_project_distribution_keys, :project_id
  end

  def down
    remove_not_null_constraint :packages_debian_project_distribution_keys, :project_id
  end
end

View File

@ -0,0 +1 @@
514b4c6e67e6e9b469db77734152cd38f5c4d43868a185464deaaf509dec736c

View File

@ -0,0 +1 @@
e001998d20cb31a4683f52a20f72cce7b84395fbf974b17977c0de9e2e630ac9

View File

@ -0,0 +1 @@
4c501c4ed54c253baaa09b1d8a9dc1eef98c03e88c95335954f02b52382e41a1

View File

@ -0,0 +1 @@
db76554ea42303ebd04c52699dea84b691adbfba94e4ab42838c32cd4ab0ee81

View File

@ -6948,7 +6948,8 @@ CREATE TABLE alert_management_alert_assignees (
id bigint NOT NULL,
user_id bigint NOT NULL,
alert_id bigint NOT NULL,
project_id bigint
project_id bigint,
CONSTRAINT check_f3efe02c81 CHECK ((project_id IS NOT NULL))
);
CREATE SEQUENCE alert_management_alert_assignees_id_seq
@ -9087,7 +9088,8 @@ CREATE TABLE boards_epic_board_labels (
id bigint NOT NULL,
epic_board_id bigint NOT NULL,
label_id bigint NOT NULL,
group_id bigint
group_id bigint,
CONSTRAINT check_c71449be47 CHECK ((group_id IS NOT NULL))
);
CREATE SEQUENCE boards_epic_board_labels_id_seq
@ -11856,7 +11858,8 @@ CREATE TABLE dast_site_profile_secret_variables (
project_id bigint,
CONSTRAINT check_236213f179 CHECK ((length(encrypted_value) <= 13352)),
CONSTRAINT check_8cbef204b2 CHECK ((char_length(key) <= 255)),
CONSTRAINT check_b49080abbf CHECK ((length(encrypted_value_iv) <= 17))
CONSTRAINT check_b49080abbf CHECK ((length(encrypted_value_iv) <= 17)),
CONSTRAINT check_d972e5f59d CHECK ((project_id IS NOT NULL))
);
COMMENT ON TABLE dast_site_profile_secret_variables IS '{"owner":"group::dynamic analysis","description":"Secret variables used in DAST on-demand scans"}';
@ -17285,6 +17288,7 @@ CREATE TABLE packages_debian_project_distribution_keys (
fingerprint text NOT NULL,
project_id bigint,
CONSTRAINT check_9e8a5eef0a CHECK ((char_length(fingerprint) <= 255)),
CONSTRAINT check_c2a4dc05d5 CHECK ((project_id IS NOT NULL)),
CONSTRAINT check_d188f6547f CHECK ((char_length(public_key) <= 524288))
);

View File

@ -10,4 +10,5 @@ level: error
nonword: true
scope: raw
tokens:
- ^\*\*Offering:\*\* (Dedicated|[^\n]*(SaaS|self-managed|, Self-Managed|GitLab dedicated|and|GitLab Dedicated,|, GitLab\.com|, Dedicated))
- ^\*\*Offering:\*\*[^\n]*(SaaS|[Ss]elf-managed|dedicated|and|Dedicated,|, GitLab\.com)
- ^\*\*Offering:\*\*[^\n]*(?<!GitLab )(Self-Managed|Dedicated)

View File

@ -1307,6 +1307,45 @@ response to assign users as administrator based on group membership, configure G
::EndTabs
### Configure a custom duration for ID Tokens
DETAILS:
**Tier:** Free, Premium, Ultimate
**Offering:** Self-managed
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/377654) in GitLab 17.8.
By default, GitLab ID tokens expire after 120 seconds.
To configure a custom duration for your ID tokens:
::Tabs
:::TabTitle Linux package (Omnibus)
1. Edit `/etc/gitlab/gitlab.rb`:
```ruby
gitlab_rails['oidc_provider_openid_id_token_expire_in_seconds'] = 3600
```
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#reconfigure-a-linux-package-installation) for the changes to take effect.
:::TabTitle Self-compiled (source)
1. Edit `/home/git/gitlab/config/gitlab.yml`:
```yaml
production: &base
oidc_provider:
openid_id_token_expire_in_seconds: 3600
```
1. Save the file and [reconfigure GitLab](../restart_gitlab.md#self-compiled-installations)
for the changes to take effect.
::EndTabs
## Troubleshooting
1. Ensure `discovery` is set to `true`. If you set it to `false`, you must

View File

@ -84,7 +84,7 @@ The following settings are available:
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** Self-managed
**Offering:** GitLab Self-Managed
**Status:** Beta
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/501717) in GitLab 17.8.

View File

@ -950,6 +950,23 @@ Returns [`Project`](#project).
| ---- | ---- | ----------- |
| <a id="queryprojectfullpath"></a>`fullPath` | [`ID!`](#id) | Full path of the project. For example, `gitlab-org/gitlab-foss`. |
### `Query.projectSecret`
View a specific project secret.
DETAILS:
**Introduced** in GitLab 17.9.
**Status**: Experiment.
Returns [`ProjectSecret`](#projectsecret).
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryprojectsecretname"></a>`name` | [`String!`](#string) | Name of the project secret to view. |
| <a id="queryprojectsecretprojectpath"></a>`projectPath` | [`ID!`](#id) | Project the secrets belong to. |
### `Query.projectSecrets`
List project secrets.
@ -27824,6 +27841,7 @@ Represents an entry from the Cloud License history.
| <a id="linkedworkitemtypelinktype"></a>`linkType` | [`String!`](#string) | Type of link. |
| <a id="linkedworkitemtypelinkupdatedat"></a>`linkUpdatedAt` | [`Time!`](#time) | Timestamp the link was updated. |
| <a id="linkedworkitemtypeworkitem"></a>`workItem` | [`WorkItem`](#workitem) | Linked work item. |
| <a id="linkedworkitemtypeworkitemstate"></a>`workItemState` | [`WorkItemState!`](#workitemstate) | State of the linked work item. |
### `Location`

View File

@ -176,28 +176,8 @@ target branch is artificially merged into the source branch, then the resulting
merge ref is compared to the source branch to calculate an accurate
diff.
Until we complete the epics ["use merge refs for diffs"](https://gitlab.com/groups/gitlab-org/-/epics/854)
and ["merge conflicts in diffs"](https://gitlab.com/groups/gitlab-org/-/epics/4893),
both options `main (base)` and `main (HEAD)` are available to be displayed in merge requests:
![Merge ref head options](../img/merge_ref_head_options_v13_6.png)
The `main (HEAD)` option is meant to replace `main (base)` in the future.
In order to support comments for both options, diff note positions are stored for
both `main (base)` and `main (HEAD)` versions ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/198457) in 12.10).
The position for `main (base)` version is stored in `Note#position` and
`Note#original_position` columns, for `main (HEAD)` version `DiffNotePosition`
has been introduced.
One of the key challenges to deal with when working on merge ref diffs are merge
conflicts. If the target and source branch contains a merge conflict, the branches
cannot be automatically merged. The
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [recording on YouTube](https://www.youtube.com/watch?v=GFXIFA4ZuZw&feature=youtu.be&ab_channel=GitLabUnfiltered) <!-- Video published on 2020-07-24 -->
is a quick introduction to the problem and the motivation behind the [epic](https://gitlab.com/groups/gitlab-org/-/epics/854).
In 13.5 a solution for both-modified merge
conflict has been
[introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/232484). However,
there are more classes of merge conflicts that are to be
[addressed](https://gitlab.com/groups/gitlab-org/-/epics/4893) in the future.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

View File

@ -209,6 +209,7 @@ The following GitLab application features are not available:
- For more information, see the [Supporting AI Features on GitLab Dedicated](https://about.gitlab.com/direction/saas-platforms/dedicated/#supporting-ai-features-on-gitlab-dedicated)
- Features other than [available features](#available-features) that must be configured outside of the GitLab user interface
- Any functionality or feature behind a feature flag that is turned `off` by default
- [Sigstore for keyless signing and verification](../../ci/yaml/signing_examples.md)
The following features are not supported:

View File

@ -47,6 +47,19 @@ Policy changes made in a merge request take effect as soon as the merge request
that do not go through a merge request, but instead are committed directly to the default branch,
may require up to 10 minutes before the policy changes take effect.
## Deleting security policy projects
> - The deletion protection for security policy projects was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/482967) in GitLab 17.8 [with a flag](../../../administration/feature_flags.md) named `reject_security_policy_project_deletion`. Disabled by default.
> - The deletion protection for groups that contain security policy projects was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/512043) in GitLab 17.9 [with a flag](../../../administration/feature_flags.md) named `reject_security_policy_project_deletion_groups`. Disabled by default.
FLAG:
The availability of this feature is controlled by a feature flag.
For more information, see the history.
To delete a security policy project or one of its parent groups, you must remove the link to it
from all other projects or groups. Otherwise, an error message is displayed when you attempt
to delete a linked security policy project or a parent group.
## Policy design guidelines
When designing your policies, your goals should be to:

View File

@ -18,7 +18,7 @@ info: "To determine the technical writer assigned to the Stage/Group associated
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
**Offering:** GitLab.com, GitLab Self-Managed, GitLab Dedicated
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/172757) in GitLab 17.7, all audit events can be streamed.

View File

@ -52,31 +52,28 @@ The context Chat is aware of also depends on your subscription tier:
In the GitLab UI, GitLab Duo Chat knows about these areas:
| Area | How to ask Chat |
|----------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Epics | From the epic, ask about `this epic`, `this`, or the URL. From any UI area, ask about the URL. |
| Issues | From the issue, ask about `this issue`, `this`, or the URL. From any UI area, ask about the URL. |
| Code files | From the single file, ask about `this code` or `this file`. From any UI area, ask about the URL. |
| Area | How to ask Chat |
|----------------|-----------------|
| Epics | From the epic, ask about `this epic`, `this`, or the URL. From any UI area, ask about the URL. |
| Issues | From the issue, ask about `this issue`, `this`, or the URL. From any UI area, ask about the URL. |
| Code files | From the single file, ask about `this code` or `this file`. From any UI area, ask about the URL. |
| Merge requests | From the merge request, ask about `this merge request`, `this`, or the URL. For more information, see [Ask about a specific merge request](examples.md#ask-about-a-specific-merge-request). |
| Commits | From the commit, ask about `this commit` or `this`. From any UI area, ask about the URL. |
| Pipeline jobs | From the pipeline job, ask about `this pipeline job` or `this`. From any UI area, ask about the URL. |
| Commits | From the commit, ask about `this commit` or `this`. From any UI area, ask about the URL. |
| Pipeline jobs | From the pipeline job, ask about `this pipeline job` or `this`. From any UI area, ask about the URL. |
In the IDEs, GitLab Duo Chat knows about these areas:
| Area | How to ask Chat |
|------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Selected lines in the editor | With the lines selected, ask about `this code` or `this file`. Chat is not aware of the file; you must select the lines you want to ask about. |
| Epics | Ask about the URL. |
| Issues | Ask about the URL. |
| Files | Use the `/include` command to search for project file(s) to add to Duo Chat's context. After you've added the files, you can ask Duo Chat questions about the file contents. Available for VS Code only. For more information see [Include Context in the IDE](examples.md#ask-about-specific-files) |
| Area | How to ask Chat |
|------------------------------|-----------------|
| Selected lines in the editor | With the lines selected, ask about `this code` or `this file`. Chat is not aware of the file; you must select the lines you want to ask about. |
| Epics | Ask about the URL. |
| Issues | Ask about the URL. |
| Files | Use the `/include` command to search for project files to add to Duo Chat's context. After you've added the files, you can ask Duo Chat questions about the file contents. Available for VS Code only. For more information see [Include Context in the IDE](examples.md#ask-about-specific-files) |
In addition, in the IDEs, when you use any of the slash commands,
like `/explain`, `/refactor`, `/fix`, or `/tests,` Duo Chat has access to the
code you selected.
NOTE:
[Repository X-Ray](../project/repository/code_suggestions/repository_xray.md) automatically enriches code generation requests for [GitLab Duo Code Suggestions](../project/repository/code_suggestions/index.md). If your project has access to Code Suggestions, then the `/refactor`, `/fix`, and `/tests` slash commands also have access to the latest Repository X-Ray report, and include that report as context for Duo.
Duo Chat always has access to:
- GitLab documentation.
@ -84,6 +81,17 @@ Duo Chat always has access to:
We are continuously working to expand contextual awareness of Chat to include more types of content.
### Additional features
[Repository X-Ray](../project/repository/code_suggestions/repository_xray.md) automatically enriches
code generation requests for [GitLab Duo Code Suggestions](../project/repository/code_suggestions/index.md).
If your project has access to Code Suggestions, then the `/refactor`, `/fix`, and `/tests` slash commands
also have access to the latest Repository X-Ray report, and include that report as context for Duo.
The extensions for GitLab Duo scan for secrets and sensitive values matching known formats. The extensions
redact this sensitive content locally before sending it to Duo Chat, or using it for code generation.
This applies to files added via `/include`, and all generation commands.
## Use GitLab Duo Chat in the GitLab UI
1. In the upper-right corner, select **GitLab Duo Chat**. A drawer opens on the right side of your screen.

View File

@ -52576,6 +52576,9 @@ msgstr ""
msgid "Select an option"
msgstr ""
msgid "Select artifacts"
msgstr ""
msgid "Select assignee"
msgstr ""

View File

@ -102,6 +102,10 @@ module QA
enabled?(ENV['SELECTIVE_EXECUTION_IMPROVED'], default: false)
end
def mr_targeting_stable_branch?
/^[\d-]+-stable(-ee|-jh)?$/.match?(ENV['CI_MERGE_REQUEST_TARGET_BRANCH_NAME'] || "")
end
def schedule_type
ENV['SCHEDULE_TYPE']
end

View File

@ -55,9 +55,14 @@ module QA
# Generate E2E test pipelines yaml files
#
# @param pipeline_types [Array] pipeline types to generate
# @return [void]
def create
updated_pipeline_definitions.each do |type, yaml|
def create(pipeline_types = SUPPORTED_PIPELINES)
unless (pipeline_types - SUPPORTED_PIPELINES).empty?
raise(ArgumentError, "Unsupported pipeline type filter set!")
end
updated_pipeline_definitions(pipeline_types).each do |type, yaml|
file_name = generated_yml_file_name(type)
File.write(file_name, "#{yaml}\n#{variables_section}\n")
logger.info("Pipeline definition file created: '#{file_name}'")
@ -155,7 +160,7 @@ module QA
"FEATURE_FLAGS" => env["QA_FEATURE_FLAGS"],
# QA_SUITES is only used by test-on-omnibus due to pipeline being reusable in external projects
"QA_SUITES" => executable_qa_suites,
"QA_TESTS" => tests.any? ? tests.join(" ") : nil
"QA_TESTS" => tests.join(" ")
}.filter_map { |k, v| " #{k}: \"#{v}\"" unless v.blank? }.join("\n")
"#{variables}#{vars}"
@ -211,9 +216,12 @@ module QA
# Updated pipeline yml files
#
# @param pipeline_types [Array<Symbol>]
# @return [Hash<Symbol, String>]
def updated_pipeline_definitions
def updated_pipeline_definitions(pipeline_types)
pipeline_job_runtimes.each_with_object({}) do |(pipeline_type, jobs), definitions|
next unless pipeline_types.include?(pipeline_type)
logger.info("Processing pipeline '#{pipeline_type}'")
definitions[pipeline_type] = jobs.reduce(pipeline_definitions[pipeline_type]) do |pipeline_yml, job|
runtime_min = (job[:runtime] / 60).ceil

View File

@ -18,25 +18,19 @@ module QA
/Dockerfile\.assets/
)
def initialize(mr_diff, mr_labels)
def initialize(mr_diff)
@mr_diff = mr_diff
@mr_labels = mr_labels
end
# Specific specs to run
#
# @return [String]
def qa_tests
return if mr_diff.empty? || dependency_changes
return if only_spec_changes? && mr_diff.all? { |change| change[:deleted_file] }
# re-enable with gitlab-org/quality/analytics/team#18
# return selective_tests_from_code_paths_mapping if coverage_based_mapping?
return unless only_spec_changes?
# @return [Array]
def qa_tests(from_code_path_mapping: false)
return [] if mr_diff.empty? || dependency_changes
return changed_specs if only_spec_changes?
return selective_tests_from_code_paths_mapping if from_code_path_mapping
mr_diff
.reject { |change| change[:deleted_file] }
.map { |change| change[:path].delete_prefix("qa/") } # make paths relative to qa directory
.join(" ")
[]
end
# Qa framework changes
@ -52,10 +46,9 @@ module QA
.any?
end
def targeting_stable_branch?
/^[\d-]+-stable(-ee|-jh)?$/.match?(ENV['CI_MERGE_REQUEST_TARGET_BRANCH_NAME'])
end
# Only quarantine changes
#
# @return [Boolean]
def quarantine_changes?
return false if mr_diff.empty?
return false if mr_diff.any? { |change| change[:new_file] || change[:deleted_file] }
@ -81,22 +74,25 @@ module QA
false
end
# All changes are spec removals
#
# @return [Boolean]
def only_spec_removal?
only_spec_changes? && mr_diff.all? { |change| change[:deleted_file] }
end
private
# @return [Array]
attr_reader :mr_diff
# @return [Array]
attr_reader :mr_labels
# @return [Array]
attr_reader :selected_e2e_tests
# Use coverage based test mapping
# Changed spec files
#
# @return [Boolean]
def coverage_based_mapping?
QA::Runtime::Env.selective_execution_improved_enabled? && non_qa_changes? && !targeting_stable_branch?
# @return [Array, nil]
def changed_specs
mr_diff
.reject { |change| change[:deleted_file] }
.map { |change| change[:path].delete_prefix("qa/") } # make paths relative to qa directory
end
# Are the changed files only qa specs?
@ -131,17 +127,22 @@ module QA
#
# @return [Array]
def selective_tests_from_code_paths_mapping
clean_map = code_paths_map&.each_with_object({}) do |(test_filename, code_mappings), hsh|
name = test_filename.gsub("./", "").split(":").first
hsh[name] = hsh.key?(name) ? (code_mappings + hsh[name]).uniq : code_mappings
logger.info("Fetching tests to execute based on code paths mapping")
unless code_paths_map
logger.warn("Failed to obtain code mappings for test selection!")
return []
end
tests = clean_map
&.select { |_test, mappings| changed_files.any? { |file| mappings.include?("./#{file}") } }
&.keys
clean_map = code_paths_map.each_with_object(Hash.new { |h, k| h[k] = [] }) do |(example_id, mappings), hsh|
name = example_id.gsub("./", "").split(":").first
logger.info("Selected tests from mapping: '#{tests}'")
tests&.join(" ")
hsh[name] = (hsh[name] + mappings).uniq
end
clean_map
.select { |_test, mappings| changed_files.any? { |file| mappings.include?("./#{file}") } }
.keys
end
# Get the mapping hash from GCP storage

View File

@ -131,6 +131,16 @@ module QA
expect(generated_cng_yaml).to include("some-other-job" => cng_pipeline_definition["some-other-job"])
end
it "only creates specifically selected pipelines" do
pipeline_creator.create([:test_on_omnibus])
expect(File.exist?(cng_pipeline_file)).to be(false)
end
it "raises error on incorrect pipeline type in argument" do
expect { pipeline_creator.create([:test_on_dot_com]) }.to raise_error(ArgumentError)
end
context "with specific test files" do
let(:test_files) { ["some_spec.rb", "some_other_spec.rb"] }

View File

@ -4,13 +4,10 @@ require "fog/google"
RSpec.describe QA::Tools::Ci::QaChanges do
include QA::Support::Helpers::StubEnv
subject(:qa_changes) { described_class.new(mr_diff, mr_labels) }
let(:mr_labels) { [] }
subject(:qa_changes) { described_class.new(mr_diff) }
before do
allow(File).to receive(:directory?).and_return(false)
stub_env('SELECTIVE_EXECUTION_IMPROVED', false)
end
context "with spec only changes" do
@ -23,16 +20,16 @@ RSpec.describe QA::Tools::Ci::QaChanges do
it ".qa_tests return changed specs" do
expect(qa_changes.qa_tests).to eq(
"qa/specs/features/test_spec.rb qa/specs/features/another_test_spec.rb"
["qa/specs/features/test_spec.rb", "qa/specs/features/another_test_spec.rb"]
)
end
it ".framework_changes? return false" do
expect(qa_changes.framework_changes?).to eq(false)
expect(qa_changes.framework_changes?).to be(false)
end
it ".quarantine_changes? return false" do
expect(qa_changes.quarantine_changes?).to eq(false)
expect(qa_changes.quarantine_changes?).to be(false)
end
end
@ -40,15 +37,15 @@ RSpec.describe QA::Tools::Ci::QaChanges do
let(:mr_diff) { [{ path: "qa/qa.rb" }] }
it ".qa_tests do not return specific specs" do
expect(qa_changes.qa_tests).to be_nil
expect(qa_changes.qa_tests).to be_empty
end
it ".framework_changes? return true" do
expect(qa_changes.framework_changes?).to eq(true)
expect(qa_changes.framework_changes?).to be(true)
end
it ".quarantine_changes? return false" do
expect(qa_changes.quarantine_changes?).to eq(false)
expect(qa_changes.quarantine_changes?).to be(false)
end
end
@ -56,7 +53,7 @@ RSpec.describe QA::Tools::Ci::QaChanges do
let(:mr_diff) { [{ path: "qa/qa/specs/features/shared_context/some_context.rb", diff: "" }] }
it ".qa_tests do not return specific specs" do
expect(qa_changes.qa_tests).to be_nil
expect(qa_changes.qa_tests).to be_empty
end
end
@ -64,28 +61,22 @@ RSpec.describe QA::Tools::Ci::QaChanges do
let(:mr_diff) { [{ path: "Gemfile" }] }
it ".framework_changes? return false" do
expect(qa_changes.framework_changes?).to eq(false)
expect(qa_changes.framework_changes?).to be(false)
end
it ".quarantine_changes? return false" do
expect(qa_changes.quarantine_changes?).to eq(false)
expect(qa_changes.quarantine_changes?).to be(false)
end
context "without mr labels" do
it ".qa_tests do not return any specific specs" do
expect(qa_changes.qa_tests).to be_nil
end
end
context "with SELECTIVE_EXECUTION_IMPROVED enabled", skip: "Re-enable with gitlab-org/quality/analytics/team#18" do
context "with from_code_path_mapping option for #qa_tests" do
let(:code_paths_mapping_data) do
{
"./qa/specs/features/test_spec.rb:23": %w[./lib/model.rb ./lib/second.rb],
"./qa/specs/features/test_spec_2.rb:11": ['./app/controller.rb']
}.stringify_keys
"./qa/specs/features/test_spec.rb:23" => %w[./lib/model.rb ./lib/second.rb],
"./qa/specs/features/test_spec_2.rb:11" => ['./app/controller.rb']
}
end
let(:selected_specs) { "qa/specs/features/test_spec.rb" }
let(:selected_specs) { ["qa/specs/features/test_spec.rb"] }
let(:gcs_project_id) { 'gitlab-qa-resources' }
let(:gcs_creds) { 'gcs-creds' }
let(:gcs_bucket_name) { 'metrics-gcs-bucket' }
@ -96,9 +87,7 @@ RSpec.describe QA::Tools::Ci::QaChanges do
end
before do
stub_env('SELECTIVE_EXECUTION_IMPROVED', true)
stub_env('QA_CODE_PATH_MAPPINGS_GCS_CREDENTIALS', gcs_creds)
stub_env('CI_MERGE_REQUEST_TARGET_BRANCH_NAME', "master")
allow(QA::Tools::Ci::CodePathsMapping).to receive(:new).and_return(code_paths_mapping)
allow(Fog::Storage::Google).to receive(:new)
@ -106,32 +95,30 @@ RSpec.describe QA::Tools::Ci::QaChanges do
.and_return(gcs_client)
end
describe '#qa_tests' do
context 'when there is a match from code paths mapping' do
let(:mr_diff) { [{ path: 'lib/model.rb' }] }
context 'when there is a match from code paths mapping' do
let(:mr_diff) { [{ path: 'lib/model.rb' }] }
it "returns specific specs" do
expect(qa_changes.qa_tests.split(" ")).to include(selected_specs)
end
it "returns specific specs" do
expect(qa_changes.qa_tests(from_code_path_mapping: true)).to eq(selected_specs)
end
end
context 'when there is no match from code paths mapping' do
let(:mr_diff) { [{ path: 'lib/new.rb' }] }
it "returns nil" do
expect(qa_changes.qa_tests(from_code_path_mapping: true)).to be_empty
end
end
context 'when code paths mapping import returns nil' do
let(:mr_diff) { [{ path: 'lib/model.rb' }] }
let(:code_paths_mapping) do
instance_double(QA::Tools::Ci::CodePathsMapping, import: nil)
end
context 'when there is no match from code paths mapping' do
let(:mr_diff) { [{ path: 'lib/new.rb' }] }
it "returns nil" do
expect(qa_changes.qa_tests).to be_nil
end
end
context 'when code paths mapping import returns nil' do
let(:mr_diff) { [{ path: 'lib/model.rb' }] }
let(:code_paths_mapping) do
instance_double(QA::Tools::Ci::CodePathsMapping, import: nil)
end
it "does not throw an error" do
expect(qa_changes.qa_tests).to be_nil
end
it "does not throw an error" do
expect(qa_changes.qa_tests(from_code_path_mapping: true)).to be_empty
end
end
end
@ -141,7 +128,7 @@ RSpec.describe QA::Tools::Ci::QaChanges do
let(:mr_diff) { [{ path: "qa/qa/specs/features/test_spec.rb", diff: "+ , quarantine: true" }] }
it ".quarantine_changes? return true" do
expect(qa_changes.quarantine_changes?).to eq(true)
expect(qa_changes.quarantine_changes?).to be(true)
end
end
@ -150,7 +137,7 @@ RSpec.describe QA::Tools::Ci::QaChanges do
let(:mr_diff) { [{ path: dependency_file }] }
it ".qa_tests do not return specific specs" do
expect(qa_changes.qa_tests).to be_nil
expect(qa_changes.qa_tests).to be_empty
end
end
end

View File

@ -11,49 +11,67 @@ namespace :ci do
logger.info("*** Analyzing merge request changes*** ")
pipeline_path = args[:pipeline_path] || "tmp"
diff = mr_diff
labels = mr_labels
run_all_label_present = mr_labels.include?("pipeline:run-all-e2e")
run_no_tests_label_present = mr_labels.include?("pipeline:skip-e2e")
if run_all_label_present && run_no_tests_label_present
raise "cannot have both pipeline:run-all-e2e and pipeline:skip-e2e labels. Please remove one of these labels"
elsif run_no_tests_label_present
logger.info("Merge request has pipeline:skip-e2e label, e2e test execution will be skipped.")
QA::Tools::Ci::PipelineCreator.create_noop(pipeline_path: pipeline_path, logger: logger)
next
end
qa_changes = QA::Tools::Ci::QaChanges.new(diff, labels)
# skip running tests when only quarantine changes detected
pipeline_creator = QA::Tools::Ci::PipelineCreator.new(
[],
pipeline_path: pipeline_path,
logger: logger
)
if run_no_tests_label_present
logger.info("Merge request has pipeline:skip-e2e label, e2e test execution will be skipped.")
next pipeline_creator.create_noop
end
diff = mr_diff
qa_changes = QA::Tools::Ci::QaChanges.new(diff)
if qa_changes.quarantine_changes?
logger.info("Merge request contains only quarantine changes, e2e test execution will be skipped!")
QA::Tools::Ci::PipelineCreator.create_noop(pipeline_path: pipeline_path, logger: logger)
next
next pipeline_creator.create_noop
end
if qa_changes.only_spec_removal?
logger.info("Merge request contains only e2e spec removal, e2e test execution will be skipped!")
next pipeline_creator.create_noop
end
# on run-all label or framework changes do not infer specific tests
tests = run_all_label_present || qa_changes.framework_changes? ? nil : qa_changes.qa_tests
run_all_tests = run_all_label_present || qa_changes.framework_changes?
tests = run_all_tests ? [] : qa_changes.qa_tests
if run_all_label_present
logger.info("Merge request has pipeline:run-all-e2e label, full test suite will be executed")
elsif qa_changes.framework_changes? # run all tests when framework changes detected
logger.info("Merge request contains qa framework changes, full test suite will be executed")
elsif tests
logger.info("Detected following specs to execute: '#{tests}'")
elsif tests.any?
logger.info("Following specs were selected for execution: '#{tests}'")
else
logger.info("No specific specs to execute detected, running full test suites will be executed")
logger.info("No specific specs to execute detected, full test suite will be executed")
end
feature_flags = QA::Tools::Ci::FfChanges.new(diff).fetch
logger.info("*** Creating E2E test pipeline definitions ***")
QA::Tools::Ci::PipelineCreator.new(
tests&.split(" ") || [],
creator_args = {
pipeline_path: pipeline_path,
logger: logger,
env: { "QA_FEATURE_FLAGS" => feature_flags }
).create
env: { "QA_FEATURE_FLAGS" => QA::Tools::Ci::FfChanges.new(diff).fetch }
}
logger.info("*** Creating E2E test pipeline definitions ***")
QA::Tools::Ci::PipelineCreator.new(tests, **creator_args).create
next if run_all_tests
next unless QA::Runtime::Env.selective_execution_improved_enabled? && !QA::Runtime::Env.mr_targeting_stable_branch?
pipelines_for_selective_improved = [:test_on_gdk]
logger.warn("*** Recreating #{pipelines_for_selective_improved} using spec list based on coverage mappings ***")
tests_from_mapping = qa_changes.qa_tests(from_code_path_mapping: true)
logger.info("Following specs were selected for execution: '#{tests_from_mapping}'")
QA::Tools::Ci::PipelineCreator.new(tests_from_mapping, **creator_args).create(pipelines_for_selective_improved)
end
desc "Export test run metrics to influxdb"

View File

@ -1,8 +1,16 @@
import { GlPopover } from '@gitlab/ui';
import { GlBadge, GlPopover } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
import ArtifactsBlock from '~/ci/job_details/components/sidebar/artifacts_block.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
import {
lockedText,
formattedDate,
expiredArtifact,
nonExpiredArtifact,
lockedExpiredArtifact,
lockedNonExpiredArtifact,
reports,
} from './constants';
describe('Artifacts block', () => {
let wrapper;
@ -11,6 +19,7 @@ describe('Artifacts block', () => {
mountExtended(ArtifactsBlock, {
propsData: {
helpUrl: 'help-url',
reports: [],
...propsData,
},
});
@ -22,41 +31,7 @@ describe('Artifacts block', () => {
const findBrowseBtn = () => wrapper.findByTestId('browse-artifacts-button');
const findArtifactsHelpLink = () => wrapper.findByTestId('artifacts-help-link');
const findPopover = () => wrapper.findComponent(GlPopover);
const expireAt = '2018-08-14T09:38:49.157Z';
const timeago = getTimeago();
const formattedDate = timeago.format(expireAt);
const lockedText =
'These artifacts are the latest. They will not be deleted (even if expired) until newer artifacts are available.';
const expiredArtifact = {
expireAt,
expired: true,
locked: false,
};
const nonExpiredArtifact = {
downloadPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/download',
browsePath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/browse',
keepPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/keep',
expireAt,
expired: false,
locked: false,
};
const lockedExpiredArtifact = {
...expiredArtifact,
downloadPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/download',
browsePath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/browse',
expired: true,
locked: true,
};
const lockedNonExpiredArtifact = {
...nonExpiredArtifact,
keepPath: undefined,
locked: true,
};
const findReportsBadge = () => wrapper.findComponent(GlBadge);
describe('with expired artifacts that are not locked', () => {
beforeEach(() => {
@ -190,4 +165,29 @@ describe('Artifacts block', () => {
expect(findArtifactsHelpLink().attributes('href')).toBe('/help/ci/jobs/job_artifacts');
});
});
describe('without reports', () => {
beforeEach(() => {
wrapper = createWrapper({
artifact: nonExpiredArtifact,
});
});
it('does not display report badge', () => {
expect(findReportsBadge().exists()).toBe(false);
});
});
describe('with reports', () => {
beforeEach(() => {
wrapper = createWrapper({
artifact: nonExpiredArtifact,
reports,
});
});
it('does display report badge', () => {
expect(findReportsBadge().text()).toBe('sast');
});
});
});

View File

@ -0,0 +1,46 @@
import { getTimeago } from '~/lib/utils/datetime_utility';
const expireAt = '2018-08-14T09:38:49.157Z';
export const lockedText =
'These artifacts are the latest. They will not be deleted (even if expired) until newer artifacts are available.';
export const formattedDate = getTimeago().format(expireAt);
export const expiredArtifact = {
expireAt,
expired: true,
locked: false,
};
export const nonExpiredArtifact = {
downloadPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/download',
browsePath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/browse',
keepPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/keep',
expireAt,
expired: false,
locked: false,
};
export const lockedExpiredArtifact = {
...expiredArtifact,
downloadPath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/download',
browsePath: '/gitlab-org/gitlab-foss/-/jobs/98314558/artifacts/browse',
expired: true,
locked: true,
};
export const lockedNonExpiredArtifact = {
...nonExpiredArtifact,
keepPath: undefined,
locked: true,
};
export const reports = [
{
file_type: 'sast',
file_format: 'raw',
size: 2036,
download_path: '/root/security-reports/-/jobs/12281/artifacts/download?file_type=sast',
},
];

View File

@ -6,7 +6,7 @@ RSpec.describe Types::WorkItems::LinkedItemType, feature_category: :portfolio_ma
specify { expect(described_class.graphql_name).to eq('LinkedWorkItemType') }
it 'exposes the expected fields' do
expected_fields = %i[linkCreatedAt linkId linkType linkUpdatedAt workItem]
expected_fields = %i[linkCreatedAt linkId linkType linkUpdatedAt workItem workItemState]
expect(described_class).to have_graphql_fields(*expected_fields)
end

View File

@ -64,8 +64,8 @@ RSpec.describe Gitlab::ImportExport::Project::ExportedRelationsMerger do
expect(result).to eq(false)
expect(shared.errors).to match_array(
[
/^undefined method `export_file' for nil:NilClass/,
/^undefined method `export_file' for nil:NilClass/
/^undefined method `export_file' for nil/,
/^undefined method `export_file' for nil/
]
)
end

View File

@ -303,4 +303,93 @@ RSpec.describe Ci::JobToken::Allowlist, feature_category: :continuous_integratio
it { is_expected.to eq scope_by_subgroup }
end
end
describe '#bulk_add_projects!' do
let_it_be(:added_project1) { create(:project) }
let_it_be(:added_project2) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:policies) { %w[read_containers read_packages] }
subject(:add_projects) do
allowlist.bulk_add_projects!([added_project1, added_project2], policies: policies, user: user,
autopopulated: true)
end
it 'adds the project scope links' do
add_projects
project_links = Ci::JobToken::ProjectScopeLink.where(source_project_id: source_project.id)
project_link = project_links.first
expect(allowlist.projects).to match_array([source_project, added_project1, added_project2])
expect(project_link.added_by_id).to eq(user.id)
expect(project_link.source_project_id).to eq(source_project.id)
expect(project_link.target_project_id).to eq(added_project1.id)
expect(project_link.job_token_policies).to eq(policies)
end
context 'when feature-flag `add_policies_to_ci_job_token` is disabled' do
before do
stub_feature_flags(add_policies_to_ci_job_token: false)
end
it 'adds the project scope link but with empty job token policies' do
add_projects
project_links = Ci::JobToken::ProjectScopeLink.where(source_project_id: source_project.id)
project_link = project_links.first
expect(allowlist.projects).to match_array([source_project, added_project1, added_project2])
expect(project_link.added_by_id).to eq(user.id)
expect(project_link.source_project_id).to eq(source_project.id)
expect(project_link.target_project_id).to eq(added_project1.id)
expect(project_link.job_token_policies).to eq([])
end
end
end
describe '#bulk_add_groups!' do
let_it_be(:added_group1) { create(:group) }
let_it_be(:added_group2) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:policies) { %w[read_containers read_packages] }
subject(:add_groups) do
allowlist.bulk_add_groups!([added_group1, added_group2], policies: policies, user: user, autopopulated: true)
end
it 'adds the group scope links' do
add_groups
group_links = Ci::JobToken::GroupScopeLink.where(source_project_id: source_project.id)
group_link = group_links.first
expect(allowlist.groups).to match_array([added_group1, added_group2])
expect(group_link.added_by_id).to eq(user.id)
expect(group_link.source_project_id).to eq(source_project.id)
expect(group_link.target_group_id).to eq(added_group1.id)
expect(group_link.job_token_policies).to eq(policies)
expect(group_link.autopopulated).to be true
end
context 'when feature-flag `add_policies_to_ci_job_token` is disabled' do
before do
stub_feature_flags(add_policies_to_ci_job_token: false)
end
it 'adds the group scope link but with empty job token policies' do
add_groups
group_links = Ci::JobToken::GroupScopeLink.where(source_project_id: source_project.id)
group_link = group_links.first
expect(allowlist.groups).to match_array([added_group1, added_group2])
expect(group_link.added_by_id).to eq(user.id)
expect(group_link.source_project_id).to eq(source_project.id)
expect(group_link.target_group_id).to eq(added_group1.id)
expect(group_link.job_token_policies).to eq([])
expect(group_link.autopopulated).to be true
end
end
end
end

View File

@ -15,6 +15,19 @@ RSpec.describe Ci::JobToken::GroupScopeLink, feature_category: :continuous_integ
let!(:model) { create(:ci_job_token_group_scope_link, added_by: parent) }
end
it_behaves_like 'a BulkInsertSafe model', described_class do
  let(:current_time) { Time.zone.now }

  # Unsaved group scope links for the shared examples to bulk-insert.
  # Fix: the block variable was named `project_scope_link` (copy-pasted from
  # the ProjectScopeLink spec) even though the items are group scope links.
  let(:valid_items_for_bulk_insertion) do
    build_list(:ci_job_token_group_scope_link, 10, source_project_id: project.id,
      created_at: current_time) do |group_scope_link|
      # Each link gets its own target group — presumably to avoid colliding
      # source/target pairs on the unique index (see 'unique index' specs below).
      group_scope_link.target_group = create(:group)
    end
  end

  let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
end
describe 'unique index' do
let!(:link) { create(:ci_job_token_group_scope_link) }

View File

@ -15,6 +15,19 @@ RSpec.describe Ci::JobToken::ProjectScopeLink, feature_category: :continuous_int
let!(:model) { create(:ci_job_token_project_scope_link, added_by: parent) }
end
it_behaves_like 'a BulkInsertSafe model', described_class do
  let(:current_time) { Time.zone.now }

  # Unsaved project scope links for the shared examples to bulk-insert.
  let(:valid_items_for_bulk_insertion) do
    build_list(:ci_job_token_project_scope_link, 10, source_project_id: project.id,
      created_at: current_time) do |project_scope_link|
      # Each link gets its own target project — presumably to avoid colliding
      # source/target pairs on the unique index (see 'unique index' specs below).
      project_scope_link.target_project = create(:project)
    end
  end

  let(:invalid_items_for_bulk_insertion) { [] } # class does not have any validations defined
end
describe 'unique index' do
let!(:link) { create(:ci_job_token_project_scope_link) }

View File

@ -3,239 +3,5 @@
require 'spec_helper'
RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching, feature_category: :integrations do
include ReactiveCachingHelpers
include StubRequests
let(:bamboo_url) { 'http://gitlab.com/bamboo' }
let_it_be(:project) { build(:project) }
subject(:integration) { build(:bamboo_integration, project: project, bamboo_url: bamboo_url) }
it_behaves_like Integrations::Base::Ci
it_behaves_like Integrations::ResetSecretFields
include_context Integrations::EnableSslVerification
describe 'Validations' do
context 'when active' do
before do
integration.active = true
end
it { is_expected.to validate_presence_of(:build_key) }
it { is_expected.to validate_presence_of(:bamboo_url) }
it_behaves_like 'issue tracker integration URL attribute', :bamboo_url
describe '#username' do
it 'does not validate the presence of username if password is nil' do
integration.password = nil
expect(integration).not_to validate_presence_of(:username)
end
it 'validates the presence of username if password is present' do
integration.password = 'secret'
expect(integration).to validate_presence_of(:username)
end
end
describe '#password' do
it 'does not validate the presence of password if username is nil' do
integration.username = nil
expect(integration).not_to validate_presence_of(:password)
end
it 'validates the presence of password if username is present' do
integration.username = 'john'
expect(integration).to validate_presence_of(:password)
end
end
end
context 'when inactive' do
before do
integration.active = false
end
it { is_expected.not_to validate_presence_of(:build_key) }
it { is_expected.not_to validate_presence_of(:bamboo_url) }
it { is_expected.not_to validate_presence_of(:username) }
it { is_expected.not_to validate_presence_of(:password) }
end
end
describe '#execute' do
it 'runs update and build action' do
stub_update_and_build_request
integration.execute(Gitlab::DataBuilder::Push::SAMPLE_DATA)
end
end
describe '#build_page' do
it 'returns the contents of the reactive cache' do
stub_reactive_cache(integration, { build_page: 'foo' }, 'sha', 'ref')
expect(integration.build_page('sha', 'ref')).to eq('foo')
end
end
describe '#commit_status' do
it 'returns the contents of the reactive cache' do
stub_reactive_cache(integration, { commit_status: 'foo' }, 'sha', 'ref')
expect(integration.commit_status('sha', 'ref')).to eq('foo')
end
end
shared_examples 'reactive cache calculation' do
describe '#build_page' do
subject { integration.calculate_reactive_cache('123', 'unused')[:build_page] }
it 'returns a specific URL when status is 500' do
stub_request(status: 500)
is_expected.to eq('http://gitlab.com/bamboo/browse/foo')
end
it 'returns a specific URL when response has no results' do
stub_request(body: %q({"results":{"results":{"size":"0"}}}))
is_expected.to eq('http://gitlab.com/bamboo/browse/foo')
end
it 'returns a build URL when bamboo_url has no trailing slash' do
stub_request(body: bamboo_response)
is_expected.to eq('http://gitlab.com/bamboo/browse/42')
end
context 'when bamboo_url has trailing slash' do
let(:bamboo_url) { 'http://gitlab.com/bamboo/' }
it 'returns a build URL' do
stub_request(body: bamboo_response)
is_expected.to eq('http://gitlab.com/bamboo/browse/42')
end
end
end
describe '#commit_status' do
subject { integration.calculate_reactive_cache('123', 'unused')[:commit_status] }
it 'sets commit status to :error when status is 500' do
stub_request(status: 500)
is_expected.to eq(:error)
end
it 'sets commit status to "pending" when status is 404' do
stub_request(status: 404)
is_expected.to eq('pending')
end
it 'sets commit status to "pending" when response has no results' do
stub_request(body: %q({"results":{"results":{"size":"0"}}}))
is_expected.to eq('pending')
end
it 'sets commit status to "success" when build state contains Success' do
stub_request(body: bamboo_response(build_state: 'YAY Success!'))
is_expected.to eq('success')
end
it 'sets commit status to "failed" when build state contains Failed' do
stub_request(body: bamboo_response(build_state: 'NO Failed!'))
is_expected.to eq('failed')
end
it 'sets commit status to "pending" when build state contains Pending' do
stub_request(body: bamboo_response(build_state: 'NO Pending!'))
is_expected.to eq('pending')
end
it 'sets commit status to :error when build state is unknown' do
stub_request(body: bamboo_response(build_state: 'FOO BAR!'))
is_expected.to eq(:error)
end
Gitlab::HTTP::HTTP_ERRORS.each do |http_error|
it "sets commit status to :error with a #{http_error.name} error" do
WebMock.stub_request(:get, 'http://gitlab.com/bamboo/rest/api/latest/result/byChangeset/123?os_authType=basic')
.to_raise(http_error)
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(http_error), { project_id: project.id })
is_expected.to eq(:error)
end
end
end
end
describe '#calculate_reactive_cache' do
context 'when Bamboo API returns single result' do
let(:bamboo_response_template) do
%q({"results":{"results":{"size":"1","result":{"buildState":"%{build_state}","planResultKey":{"key":"42"}}}}})
end
it_behaves_like 'reactive cache calculation'
end
context 'when Bamboo API returns an array of results and we only consider the last one' do
let(:bamboo_response_template) do
'{"results":{"results":{"size":"2","result":[{"buildState":"%{build_state}","planResultKey":{"key":"41"}}, ' \
'{"buildState":"%{build_state}","planResultKey":{"key":"42"}}]}}}'
end
it_behaves_like 'reactive cache calculation'
end
end
describe '#avatar_url' do
it 'returns the avatar image path' do
expect(subject.avatar_url).to eq(ActionController::Base.helpers.image_path(
'illustrations/third-party-logos/integrations-logos/atlassian-bamboo.svg'
))
end
end
def stub_update_and_build_request(status: 200, body: nil)
bamboo_full_url = 'http://gitlab.com/bamboo/updateAndBuild.action?buildKey=foo&os_authType=basic'
stub_bamboo_request(bamboo_full_url, status, body)
end
def stub_request(status: 200, body: nil)
bamboo_full_url = 'http://gitlab.com/bamboo/rest/api/latest/result/byChangeset/123?os_authType=basic'
stub_bamboo_request(bamboo_full_url, status, body)
end
def stub_bamboo_request(url, status, body)
stub_full_request(url).to_return(
status: status,
headers: { 'Content-Type' => 'application/json' },
body: body
).with(basic_auth: %w[mic password])
end
def bamboo_response(build_state: 'success')
# reference: https://docs.atlassian.com/atlassian-bamboo/REST/6.2.5/#d2e786
format(bamboo_response_template, build_state: build_state)
end
it_behaves_like Integrations::Base::Bamboo
end

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true

require 'spec_helper'

# Instance-level Bamboo integration. All behavior is exercised by the shared
# Integrations::Base::Bamboo examples; this spec only wires them up.
RSpec.describe Integrations::Instance::Bamboo, :use_clean_rails_memory_store_caching, feature_category: :integrations do
  it_behaves_like Integrations::Base::Bamboo
end

View File

@ -0,0 +1,162 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::JobToken::AutopopulateAllowlistService, feature_category: :secrets_management do
let_it_be(:maintainer) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:accessed_project) { create(:project) }
let(:service) { described_class.new(accessed_project, maintainer) }
# [1, 21], ns1, p1
# [1, 2, 3], ns1, ns2, p2
# [1, 2, 4], ns1, ns2, p3
# [1, 2, 5], ns1, ns2, p4
# [1, 2, 12, 13], ns1, ns2, ns3, p5
# [1, 6, 7], ns1, ns4, p6
# [1, 6, 8], ns1, ns4, p7
# [9, 10, 11] ns5, ns6, p8
let_it_be(:ns1) { create(:group, name: 'ns1') }
let_it_be(:ns2) { create(:group, parent: ns1, name: 'ns2') }
let_it_be(:ns3) { create(:group, parent: ns2, name: 'ns3') }
let_it_be(:ns4) { create(:group, parent: ns1, name: 'ns4') }
let_it_be(:ns5) { create(:group, name: 'ns5') }
let_it_be(:ns6) { create(:group, parent: ns5, name: 'ns6') }
let_it_be(:pns1) { create(:project_namespace, parent: ns1) }
let_it_be(:pns2) { create(:project_namespace, parent: ns2) }
let_it_be(:pns3) { create(:project_namespace, parent: ns2) }
let_it_be(:pns4) { create(:project_namespace, parent: ns2) }
let_it_be(:pns5) { create(:project_namespace, parent: ns3) }
let_it_be(:pns6) { create(:project_namespace, parent: ns4) }
let_it_be(:pns7) { create(:project_namespace, parent: ns4) }
let_it_be(:pns8) { create(:project_namespace, parent: ns6) }
let(:compaction_limit) { 4 }
before_all do
accessed_project.add_maintainer(maintainer)
accessed_project.add_developer(developer)
end
before do
origin_project_namespaces = [
pns1, pns2, pns3, pns4, pns5, pns6, pns7, pns8
]
origin_project_namespaces.each do |project_namespace|
create(:ci_job_token_authorization, origin_project: project_namespace.project, accessed_project: accessed_project,
last_authorized_at: 1.day.ago)
end
stub_const("#{described_class.name}::COMPACTION_LIMIT", compaction_limit)
end
describe '#execute' do
context 'with a user with the developer role' do
let(:service) { described_class.new(accessed_project, developer) }
it 'raises an access denied error' do
expect { service.execute }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
it 'creates the expected group and project links for the given limit' do
service.execute
expect(Ci::JobToken::GroupScopeLink.autopopulated.pluck(:target_group_id)).to match_array([ns2.id, ns4.id])
expect(Ci::JobToken::ProjectScopeLink.autopopulated.pluck(:target_project_id)).to match_array([pns1.project.id,
pns8.project.id])
end
context 'with a compaction_limit of 3' do
let(:compaction_limit) { 3 }
it 'creates the expected group and project links' do
service.execute
expect(Ci::JobToken::GroupScopeLink.autopopulated.pluck(:target_group_id)).to match_array([ns1.id])
expect(Ci::JobToken::ProjectScopeLink.autopopulated.pluck(:target_project_id)).to match_array([pns8.project.id])
end
end
context 'with a compaction_limit of 1' do
  let(:compaction_limit) { 1 }

  # The fixture contains two unrelated top-level namespace hierarchies
  # (ns1 and ns5 — see the traversal-id diagram above), so a single
  # allowlist entry can never cover all authorizations.
  it 'logs a CompactionLimitCannotBeAchievedError error' do
    expect do
      service.execute
    end.to raise_error(Gitlab::Utils::TraversalIdCompactor::CompactionLimitCannotBeAchievedError)

    # When compaction fails, no links may be persisted.
    expect(Ci::JobToken::GroupScopeLink.count).to be(0)
    expect(Ci::JobToken::ProjectScopeLink.count).to be(0)
  end
end
context 'when validation fails' do
  let(:compaction_limit) { 5 }

  it 'logs an UnexpectedCompactionEntry error' do
    # Tamper with the compactor's output: append a traversal-id path that was
    # never part of the input, which the service's validation should reject.
    allow(Gitlab::Utils::TraversalIdCompactor).to receive(:compact).and_wrap_original do |original_method, *args|
      original_response = original_method.call(*args)

      original_response << [1, 2, 3]
    end

    expect { service.execute }.to raise_error(Gitlab::Utils::TraversalIdCompactor::UnexpectedCompactionEntry)
  end

  it 'logs a RedundantCompactionEntry error' do
    # Append a strict prefix of an existing entry: it adds no new coverage,
    # so validation should flag it as redundant.
    allow(Gitlab::Utils::TraversalIdCompactor).to receive(:compact).and_wrap_original do |original_method, *args|
      original_response = original_method.call(*args)

      original_response << original_response.last.first(2)
    end

    expect { service.execute }.to raise_error(Gitlab::Utils::TraversalIdCompactor::RedundantCompactionEntry)
  end
end
context 'with three top-level namespaces' do
# [1, 21], ns1, p1
# [1, 2, 3], ns1, ns2, p2
# [1, 2, 4], ns1, ns2, p3
# [1, 2, 5], ns1, ns2, p4
# [1, 2, 12, 13], ns1, ns2, ns3, p5
# [1, 6, 7], ns1, ns4, p6
# [1, 6, 8], ns1, ns4, p7
# [9, 10, 11] ns5, ns6, p8
# [14, 15] ns7, p9
let(:ns7) { create(:group, name: 'ns7') }
let(:pns9) { create(:project_namespace, parent: ns7) }
before do
create(:ci_job_token_authorization, origin_project: pns9.project, accessed_project: accessed_project,
last_authorized_at: 1.day.ago)
end
context 'with a compaction_limit of 2' do
let(:compaction_limit) { 2 }
it 'raises when the limit cannot be achieved' do
expect do
service.execute
end.to raise_error(Gitlab::Utils::TraversalIdCompactor::CompactionLimitCannotBeAchievedError)
expect(Ci::JobToken::GroupScopeLink.count).to be(0)
expect(Ci::JobToken::ProjectScopeLink.count).to be(0)
end
end
context 'with a compaction_limit of 3' do
  let(:compaction_limit) { 3 }

  # Fix: example description contained a duplicated word ("creates creates").
  it 'creates the expected group and project links' do
    service.execute

    # With the extra top-level namespace ns7 in play, the ns1 hierarchy is
    # compacted to a single group link, leaving the two isolated projects
    # (p8 under ns5/ns6, p9 under ns7) as direct project links.
    expect(Ci::JobToken::GroupScopeLink.autopopulated.pluck(:target_group_id)).to match_array([ns1.id])
    expect(Ci::JobToken::ProjectScopeLink.autopopulated.pluck(:target_project_id)).to match_array(
      [pns8.project.id, pns9.project.id]
    )
  end
end
end
end
end

View File

@ -0,0 +1,79 @@
# frozen_string_literal: true

require 'spec_helper'

# Verifies that the service removes only autopopulated job-token allowlist
# links (group and project scope) of the given project, and enforces the
# maintainer permission.
RSpec.describe Ci::JobToken::ClearAutopopulatedAllowlistService, feature_category: :secrets_management do
  let_it_be(:accessed_project) { create(:project) }
  let_it_be(:maintainer) { create(:user) }
  let_it_be(:developer) { create(:user) }

  let(:service) { described_class.new(accessed_project, maintainer) }

  before_all do
    accessed_project.add_maintainer(maintainer)
    accessed_project.add_developer(developer)
  end

  describe '#execute' do
    context 'with a user with the developer role' do
      let(:service) { described_class.new(accessed_project, developer) }

      # Developers must not be able to clear the allowlist.
      it 'raises an access denied error' do
        expect { service.execute }.to raise_error(Gitlab::Access::AccessDeniedError)
      end
    end

    it 'deletes autopopulated group scope links' do
      create(:ci_job_token_group_scope_link, source_project: accessed_project, autopopulated: true)
      create(:ci_job_token_group_scope_link, source_project: accessed_project, autopopulated: false)

      expect do
        service.execute
      end.to change { Ci::JobToken::GroupScopeLink.autopopulated.count }.by(-1)
        .and not_change { Ci::JobToken::ProjectScopeLink.autopopulated.count }
    end

    it 'deletes autopopulated project scope links' do
      create(:ci_job_token_project_scope_link, source_project: accessed_project, direction: :inbound,
        autopopulated: true)
      create(:ci_job_token_project_scope_link, source_project: accessed_project, direction: :inbound,
        autopopulated: false)

      expect do
        service.execute
      end.to not_change { Ci::JobToken::GroupScopeLink.autopopulated.count }
        .and change { Ci::JobToken::ProjectScopeLink.autopopulated.count }.by(-1)
    end

    it 'does not delete non-autopopulated links' do
      create(:ci_job_token_group_scope_link, source_project: accessed_project, autopopulated: false)
      create(:ci_job_token_project_scope_link, source_project: accessed_project, direction: :inbound,
        autopopulated: false)

      expect do
        service.execute
      end.to not_change { Ci::JobToken::GroupScopeLink.autopopulated.count }
        .and not_change { Ci::JobToken::ProjectScopeLink.autopopulated.count }
    end

    # Both deletions must happen atomically.
    it 'executes within a transaction' do
      expect(ApplicationRecord).to receive(:transaction).and_yield

      service.execute
    end

    it 'only deletes links for the given project' do
      create(:ci_job_token_group_scope_link, source_project: accessed_project, autopopulated: true)
      create(:ci_job_token_group_scope_link, source_project: create(:project), autopopulated: true)
      create(:ci_job_token_project_scope_link, source_project: accessed_project, direction: :inbound,
        autopopulated: true)
      create(:ci_job_token_project_scope_link, source_project: create(:project), direction: :inbound,
        autopopulated: true)

      # Only the two links belonging to accessed_project are removed.
      expect do
        service.execute
      end.to change { Ci::JobToken::GroupScopeLink.autopopulated.count }.by(-1)
        .and change { Ci::JobToken::ProjectScopeLink.autopopulated.count }.by(-1)
    end
  end
end

View File

@ -33,7 +33,6 @@
- './ee/spec/features/burndown_charts_spec.rb'
- './ee/spec/features/burnup_charts_spec.rb'
- './ee/spec/features/ci_shared_runner_settings_spec.rb'
- './ee/spec/features/ci_shared_runner_warnings_spec.rb'
- './ee/spec/features/clusters/cluster_detail_page_spec.rb'
- './ee/spec/features/dashboards/activity_spec.rb'
- './ee/spec/features/dashboards/groups_spec.rb'

View File

@ -0,0 +1,241 @@
# frozen_string_literal: true
RSpec.shared_examples Integrations::Base::Bamboo do
include ReactiveCachingHelpers
include StubRequests
let(:bamboo_url) { 'http://gitlab.com/bamboo' }
let(:bamboo_update_url) { 'http://gitlab.com/bamboo/updateAndBuild.action?buildKey=foo&os_authType=basic' }
let_it_be(:project) { build(:project) }
subject(:integration) { build(:bamboo_integration, project: project, bamboo_url: bamboo_url) }
it_behaves_like Integrations::Base::Ci
it_behaves_like Integrations::ResetSecretFields
include_context Integrations::EnableSslVerification
describe 'Validations' do
context 'when active' do
before do
integration.active = true
end
it { is_expected.to validate_presence_of(:build_key) }
it { is_expected.to validate_presence_of(:bamboo_url) }
it_behaves_like 'issue tracker integration URL attribute', :bamboo_url
describe '#username' do
it 'does not validate the presence of username if password is nil' do
integration.password = nil
expect(integration).not_to validate_presence_of(:username)
end
it 'validates the presence of username if password is present' do
integration.password = 'secret'
expect(integration).to validate_presence_of(:username)
end
end
describe '#password' do
it 'does not validate the presence of password if username is nil' do
integration.username = nil
expect(integration).not_to validate_presence_of(:password)
end
it 'validates the presence of password if username is present' do
integration.username = 'john'
expect(integration).to validate_presence_of(:password)
end
end
end
context 'when inactive' do
before do
integration.active = false
end
it { is_expected.not_to validate_presence_of(:build_key) }
it { is_expected.not_to validate_presence_of(:bamboo_url) }
it { is_expected.not_to validate_presence_of(:username) }
it { is_expected.not_to validate_presence_of(:password) }
end
end
describe '#execute' do
it 'runs update and build action' do
stub_update_and_build_request
integration.execute(Gitlab::DataBuilder::Push::SAMPLE_DATA)
expect(WebMock).to have_requested(:get, stubbed_hostname(bamboo_update_url))
end
end
describe '#build_page' do
it 'returns the contents of the reactive cache' do
stub_reactive_cache(integration, { build_page: 'foo' }, 'sha', 'ref')
expect(integration.build_page('sha', 'ref')).to eq('foo')
end
end
describe '#commit_status' do
it 'returns the contents of the reactive cache' do
stub_reactive_cache(integration, { commit_status: 'foo' }, 'sha', 'ref')
expect(integration.commit_status('sha', 'ref')).to eq('foo')
end
end
shared_examples 'reactive cache calculation' do
describe '#build_page' do
subject { integration.calculate_reactive_cache('123', 'unused')[:build_page] }
it 'returns a specific URL when status is 500' do
stub_request(status: 500)
is_expected.to eq('http://gitlab.com/bamboo/browse/foo')
end
it 'returns a specific URL when response has no results' do
stub_request(body: %q({"results":{"results":{"size":"0"}}}))
is_expected.to eq('http://gitlab.com/bamboo/browse/foo')
end
it 'returns a build URL when bamboo_url has no trailing slash' do
stub_request(body: bamboo_response)
is_expected.to eq('http://gitlab.com/bamboo/browse/42')
end
context 'when bamboo_url has trailing slash' do
let(:bamboo_url) { 'http://gitlab.com/bamboo/' }
it 'returns a build URL' do
stub_request(body: bamboo_response)
is_expected.to eq('http://gitlab.com/bamboo/browse/42')
end
end
end
describe '#commit_status' do
subject { integration.calculate_reactive_cache('123', 'unused')[:commit_status] }
it 'sets commit status to :error when status is 500' do
stub_request(status: 500)
is_expected.to eq(:error)
end
it 'sets commit status to "pending" when status is 404' do
stub_request(status: 404)
is_expected.to eq('pending')
end
it 'sets commit status to "pending" when response has no results' do
stub_request(body: %q({"results":{"results":{"size":"0"}}}))
is_expected.to eq('pending')
end
it 'sets commit status to "success" when build state contains Success' do
stub_request(body: bamboo_response(build_state: 'YAY Success!'))
is_expected.to eq('success')
end
it 'sets commit status to "failed" when build state contains Failed' do
stub_request(body: bamboo_response(build_state: 'NO Failed!'))
is_expected.to eq('failed')
end
it 'sets commit status to "pending" when build state contains Pending' do
stub_request(body: bamboo_response(build_state: 'NO Pending!'))
is_expected.to eq('pending')
end
it 'sets commit status to :error when build state is unknown' do
stub_request(body: bamboo_response(build_state: 'FOO BAR!'))
is_expected.to eq(:error)
end
Gitlab::HTTP::HTTP_ERRORS.each do |http_error|
it "sets commit status to :error with a #{http_error.name} error" do
WebMock
.stub_request(:get, 'http://gitlab.com/bamboo/rest/api/latest/result/byChangeset/123?os_authType=basic')
.to_raise(http_error)
expect(Gitlab::ErrorTracking)
.to receive(:log_exception)
.with(instance_of(http_error), { project_id: project.id })
is_expected.to eq(:error)
end
end
end
end
describe '#calculate_reactive_cache' do
context 'when Bamboo API returns single result' do
let(:bamboo_response_template) do
%q({"results":{"results":{"size":"1","result":{"buildState":"%{build_state}","planResultKey":{"key":"42"}}}}})
end
it_behaves_like 'reactive cache calculation'
end
context 'when Bamboo API returns an array of results and we only consider the last one' do
let(:bamboo_response_template) do
'{"results":{"results":{"size":"2","result":[{"buildState":"%{build_state}","planResultKey":{"key":"41"}}, ' \
'{"buildState":"%{build_state}","planResultKey":{"key":"42"}}]}}}'
end
it_behaves_like 'reactive cache calculation'
end
end
describe '#avatar_url' do
it 'returns the avatar image path' do
expect(subject.avatar_url).to eq(ActionController::Base.helpers.image_path(
'illustrations/third-party-logos/integrations-logos/atlassian-bamboo.svg'
))
end
end
# Stubs the Bamboo "update and build" trigger endpoint that #execute hits.
def stub_update_and_build_request(status: 200, body: nil)
  stub_bamboo_request(bamboo_update_url, status, body)
end
# Stubs the Bamboo build-result REST endpoint used by the reactive-cache specs.
# NOTE(review): this shadows WebMock's stub_request inside these examples;
# specs needing the raw version call WebMock.stub_request explicitly (see the
# HTTP_ERRORS examples above).
def stub_request(status: 200, body: nil)
  bamboo_full_url = 'http://gitlab.com/bamboo/rest/api/latest/result/byChangeset/123?os_authType=basic'
  stub_bamboo_request(bamboo_full_url, status, body)
end
# Registers a stub for +url+ that returns a JSON response with the given
# status/body, and requires the request to carry the basic-auth credentials
# configured on the integration fixture.
def stub_bamboo_request(url, status, body)
  response = {
    status: status,
    headers: { 'Content-Type' => 'application/json' },
    body: body
  }

  stub_full_request(url)
    .to_return(response)
    .with(basic_auth: %w[mic password])
end
# Renders the Bamboo REST response fixture for the given build state.
# NOTE(review): relies on the including context defining bamboo_response_template.
# reference: https://docs.atlassian.com/atlassian-bamboo/REST/6.2.5/#d2e786
def bamboo_response(build_state: 'success')
  format(bamboo_response_template, build_state: build_state)
end
end

View File

@ -149,7 +149,6 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(items.map(&:relative_position)).to all(be_valid_position)
expect(bunch.reverse.sort_by(&:relative_position)).to eq(bunch)
expect(nils.reverse.sort_by(&:relative_position)).not_to eq(nils)
expect(bunch.map(&:relative_position)).to all(be < nils.map(&:relative_position).min)
end
@ -257,7 +256,6 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(items.map(&:relative_position)).to all(be_valid_position)
expect(bunch.reverse.sort_by(&:relative_position)).to eq(bunch)
expect(nils.reverse.sort_by(&:relative_position)).not_to eq(nils)
expect(bunch.map(&:relative_position)).to all(be > nils.map(&:relative_position).max)
end
end

View File

@ -24,7 +24,6 @@ RSpec.describe Tooling::Danger::RubocopDiscourageTodoAddition, feature_category:
- 'ee/spec/features/billings/billing_plans_spec.rb'
- 'ee/spec/features/boards/scoped_issue_board_spec.rb'
- 'ee/spec/features/boards/user_visits_board_spec.rb'
- 'ee/spec/features/ci_shared_runner_warnings_spec.rb'
- 'ee/spec/features/bar_spec.rb'
- 'ee/spec/features/epic_boards/epic_boards_spec.rb'
YML

View File

@ -78,7 +78,7 @@ RSpec.describe BulkImports::ExportRequestWorker, feature_category: :importers do
a_hash_including(
'exception.backtrace' => anything,
'exception.class' => 'NoMethodError',
'exception.message' => /^undefined method `model_id' for nil:NilClass/,
'exception.message' => /^undefined method `model_id' for nil/,
'message' => 'Failed to fetch source entity id'
)
).twice

View File

@ -36,7 +36,7 @@ info: "To determine the technical writer assigned to the Stage/Group associated
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
**Offering:** GitLab.com, GitLab Self-Managed, GitLab Dedicated
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/172757) in GitLab 17.7, all audit events can be streamed.