Add latest changes from gitlab-org/gitlab@master

parent 235b50859c
commit d849f6b8e5

@@ -49,10 +49,10 @@ export default {
</script>

<template>
<div>
<p class="gl-mb-0 gl-text-left">
<gl-link v-if="cell.href" v-gl-tooltip="cell.tooltip" :href="cell.href" class="gl-text-default">
{{ cell.text }}
</gl-link>
<span v-else>{{ cell.text }}</span>
</div>
</p>
</template>

@@ -68,25 +68,24 @@ export default {
</script>

<template>
<div>
<div class="gl-mb-3">
<div class="gl-flex gl-flex-col gl-gap-3">
<div class="gl-flex gl-flex-wrap gl-items-center gl-gap-2">
<slot :runner="runner" name="runner-name">
<runner-name :runner="runner" />
</slot>

<runner-managers-badge :count="managersCount" class="gl-align-middle" />
<runner-managers-badge :count="managersCount" />
<gl-icon
v-if="runner.locked"
v-gl-tooltip
:title="$options.i18n.I18N_LOCKED_RUNNER_DESCRIPTION"
name="lock"
/>
<runner-type-badge :type="runner.runnerType" class="gl-align-middle" />
<runner-type-badge :type="runner.runnerType" />
</div>

<div
v-if="runner.version || runner.description"
class="gl-mb-3 gl-ml-auto gl-inline-flex gl-max-w-full gl-items-center gl-text-sm"
class="gl-mb-2 gl-inline-flex gl-text-subtle md:gl-mb-0"
>
<template v-if="firstVersion">
<div class="gl-shrink-0">

@@ -99,7 +98,7 @@ export default {
</template>
<tooltip-on-truncate
v-if="runner.description"
class="gl-block gl-truncate"
class="gl-block gl-truncate gl-text-left"
:class="{ 'gl-text-subtle': !runner.description }"
:title="runner.description"
>

@@ -107,7 +106,11 @@ export default {
</tooltip-on-truncate>
</div>

<div class="gl-text-sm">
<div class="gl-flex gl-flex-wrap gl-gap-x-4 gl-gap-y-2 gl-text-sm">
<runner-summary-field icon="pipeline" data-testid="job-count" :tooltip="__('Jobs')">
<runner-job-count :runner="runner" />
</runner-summary-field>

<runner-summary-field icon="clock" icon-size="sm">
<gl-sprintf :message="$options.i18n.I18N_LAST_CONTACT_LABEL">
<template #timeAgo>

@@ -124,15 +127,11 @@ export default {
>
</runner-summary-field>

<runner-summary-field icon="pipeline" data-testid="job-count" :tooltip="__('Jobs')">
<runner-job-count :runner="runner" />
</runner-summary-field>

<runner-summary-field icon="calendar">
<runner-created-at :runner="runner" />
</runner-summary-field>
</div>

<runner-tags class="gl-block" :tag-list="runner.tagList" />
<runner-tags class="gl-flex gl-flex-wrap gl-gap-2" :tag-list="runner.tagList" />
</div>
</template>

@@ -24,8 +24,8 @@ export default {
</script>

<template>
<div v-gl-tooltip="tooltip" class="gl-mb-3 gl-mr-4 gl-inline-block gl-text-subtle">
<gl-icon v-if="icon" :name="icon" :size="12" variant="subtle" />
<div v-gl-tooltip="tooltip" class="gl-flex gl-items-start gl-gap-2 gl-text-subtle">
<gl-icon v-if="icon" :name="icon" :size="12" variant="subtle" class="gl-mt-1 gl-shrink-0" />
<!-- display tooltip as a label for screen readers and make it unavailable for copying -->
<span class="gl-sr-only gl-select-none">{{ tooltip }}</span>
<slot></slot>

@@ -51,7 +51,7 @@ export default {
};
</script>
<template>
<span v-if="message">
<span v-if="message" class="gl-text-left">
<gl-sprintf :message="message">
<template #timeAgo>
<time-ago v-if="createdAt" :time="createdAt" />

@@ -1,7 +1,7 @@
<script>
import { GlFormCheckbox, GlTableLite, GlTooltipDirective, GlSkeletonLoader } from '@gitlab/ui';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { s__ } from '~/locale';
import { __, s__ } from '~/locale';
import HelpPopover from '~/vue_shared/components/help_popover.vue';
import checkedRunnerIdsQuery from '../graphql/list/checked_runner_ids.query.graphql';
import { tableField } from '../utils';

@@ -16,7 +16,7 @@ const defaultFields = [
tableField({ key: 'status', label: s__('Runners|Status'), thClasses: ['gl-w-3/20'] }),
tableField({ key: 'summary', label: s__('Runners|Runner configuration') }),
tableField({ key: 'owner', label: s__('Runners|Owner'), thClasses: ['gl-w-4/20'] }),
tableField({ key: 'actions', label: '', thClasses: ['gl-w-3/20'] }),
tableField({ key: 'actions', label: __('Actions'), thClasses: ['md:gl-invisible', 'gl-w-3/20'] }),
];

export default {

@@ -79,7 +79,7 @@ export default {
if (this.checkable) {
const checkboxField = tableField({
key: 'checkbox',
label: s__('Runners|Checkbox'),
label: __('Select'),
thClasses: ['gl-w-9'],
tdClass: ['gl-text-center'],
});

@@ -136,6 +136,7 @@ export default {
<template #cell(checkbox)="{ item }">
<gl-form-checkbox
v-if="canDelete(item)"
class="gl-flex"
:checked="isChecked(item)"
@change="onCheckboxChange(item, $event)"
/>

@@ -11,6 +11,7 @@ query requestingReview(
$perPage: Int!
$afterCursor: String
$sort: MergeRequestSort = UPDATED_DESC
$or: UnionedMergeRequestFilterInput
) {
currentUser {
id

@@ -21,6 +22,7 @@ query requestingReview(
reviewerWildcardId: $reviewerWildcardId
mergedAfter: $mergedAfter
not: $not
or: $or
first: $perPage
after: $afterCursor
sort: $sort

@@ -4,6 +4,8 @@ query requestingReviewCount(
$reviewStates: [MergeRequestReviewState!]
$reviewerWildcardId: ReviewerWildcardId
$mergedAfter: Time
$or: UnionedMergeRequestFilterInput
$not: MergeRequestsResolverNegatedParams
) {
currentUser {
id

@@ -13,6 +15,8 @@ query requestingReviewCount(
reviewStates: $reviewStates
reviewerWildcardId: $reviewerWildcardId
mergedAfter: $mergedAfter
or: $or
not: $not
) {
count
}

@@ -3,6 +3,8 @@ query assigneeOrReviewerCount(
$assignedReviewStates: [MergeRequestReviewState!]
$reviewerReviewStates: [MergeRequestReviewState!]
$mergedAfter: Time
$or: UnionedMergeRequestFilterInput
$not: MergeRequestsResolverNegatedParams
) {
currentUser {
id

@@ -11,6 +13,8 @@ query assigneeOrReviewerCount(
assignedReviewStates: $assignedReviewStates
reviewerReviewStates: $reviewerReviewStates
mergedAfter: $mergedAfter
or: $or
not: $not
) {
count
}

@@ -11,6 +11,7 @@ query requestingReviewAuthorOrAssignee(
$perPage: Int!
$afterCursor: String
$sort: MergeRequestSort = UPDATED_DESC
$or: UnionedMergeRequestFilterInput
) {
currentUser {
id

@@ -22,6 +23,7 @@ query requestingReviewAuthorOrAssignee(
reviewerWildcardId: $reviewerWildcardId
mergedAfter: $mergedAfter
not: $not
or: $or
first: $perPage
after: $afterCursor
sort: $sort

@@ -4,6 +4,8 @@ query requestingReviewAuthorOrAssigneeCount(
$reviewStates: [MergeRequestReviewState!]
$reviewerWildcardId: ReviewerWildcardId
$mergedAfter: Time
$or: UnionedMergeRequestFilterInput
$not: MergeRequestsResolverNegatedParams
) {
currentUser {
id

@@ -14,6 +16,8 @@ query requestingReviewAuthorOrAssigneeCount(
reviewStates: $reviewStates
reviewerWildcardId: $reviewerWildcardId
mergedAfter: $mergedAfter
or: $or
not: $not
) {
count
}

@@ -10,6 +10,7 @@ query reviewRequests(
$perPage: Int!
$afterCursor: String
$sort: MergeRequestSort = UPDATED_DESC
$or: UnionedMergeRequestFilterInput
) {
currentUser {
id

@@ -19,6 +20,7 @@ query reviewRequests(
reviewStates: $reviewStates
mergedAfter: $mergedAfter
not: $not
or: $or
first: $perPage
after: $afterCursor
sort: $sort

@@ -3,6 +3,8 @@ query reviewRequestsCount(
$reviewState: MergeRequestReviewState
$reviewStates: [MergeRequestReviewState!]
$mergedAfter: Time
$or: UnionedMergeRequestFilterInput
$not: MergeRequestsResolverNegatedParams
) {
currentUser {
id

@@ -11,6 +13,8 @@ query reviewRequestsCount(
reviewState: $reviewState
reviewStates: $reviewStates
mergedAfter: $mergedAfter
or: $or
not: $not
) {
count
}

@@ -41,7 +41,7 @@ export function assignedToYouBadge({ mergeRequest }) {
return { icon: 'merge-request', text: __('Draft') };
}

if (mergeRequest.reviewers?.nodes.length === 0) {
if (mergeRequest.reviewers?.nodes.length <= 1) {
return { icon: 'user', text: __('Reviewers needed') };
}

@@ -590,7 +590,11 @@ module MergeRequestsHelper
helpContent: _('Reviewers left feedback, or requested changes from you, on these merge requests.'),
query: is_author_or_assignee ? 'authorOrAssigneeMergeRequests' : 'assignedMergeRequests',
variables: {
reviewStates: %w[REVIEWED REQUESTED_CHANGES]
reviewStates: %w[REVIEWED REQUESTED_CHANGES],
not: {
onlyReviewer: true,
reviewerUsername: 'GitlabDuo'
}
}
},
{

@@ -615,7 +619,10 @@ module MergeRequestsHelper

query: is_author_or_assignee ? 'authorOrAssigneeMergeRequests' : 'assignedMergeRequests',
variables: {
reviewerWildcardId: 'NONE'
or: {
reviewerWildcard: 'NONE',
onlyReviewerUsername: 'GitlabDuo'
}
}
}
],

@@ -654,6 +654,10 @@ class MergeRequest < ApplicationRecord
.pluck(:source_branch)
end

def self.distinct_source_branches
distinct.pluck(:source_branch)
end

def self.sort_by_attribute(method, excluded_labels: [])
case method.to_s
when 'merged_at', 'merged_at_asc' then order_merged_at_asc

@@ -16,13 +16,15 @@
- c.with_body do
- if can?(current_user, :create_runner, @project)
- if @project_runners.any?
%h3.gl-heading-5.gl-mt-5.gl-mb-0.gl-px-5= s_('Runners|Assigned project runners')
%h3.gl-m-3.gl-mb-0.gl-block.gl-rounded-base.gl-bg-strong.gl-px-3.gl-py-2.gl-text-sm.gl-font-semibold.gl-text-subtle
= s_('Runners|Assigned project runners')
%ul.content-list{ data: { testid: 'assigned_project_runners' } }
= render partial: 'projects/runners/runner', collection: @project_runners, as: :runner
= paginate @project_runners, theme: "gitlab", param_name: "project_page", params: { expand_runners: true, anchor: 'js-runners-settings' }

- if @assignable_runners.any?
%h3.gl-heading-5.gl-mt-5.gl-mb-0.gl-px-5= _('Other available runners')
%h3.gl-m-3.gl-mb-0.gl-block.gl-rounded-base.gl-bg-strong.gl-px-3.gl-py-2.gl-text-sm.gl-font-semibold.gl-text-subtle
= _('Other available runners')
%ul.content-list{ data: { testid: 'available_project_runners' } }
= render partial: 'projects/runners/runner', collection: @assignable_runners, as: :runner
= paginate @assignable_runners, theme: "gitlab", param_name: "specific_page", :params => { :anchor => 'js-runners-settings'}

@@ -62,6 +62,13 @@ module BulkImports
if pipeline_tracker.enqueued? || pipeline_tracker.started?
logger.info(log_attributes(message: 'Pipeline starting'))
run
elsif pipeline_tracker.created?
Gitlab::ErrorTracking.log_exception(
Pipeline::FailedError.new('Pipeline in invalid status'),
log_attributes
)
else
logger.warn(log_attributes(message: 'Pipeline in invalid status'))
end
end
end

@@ -1,9 +0,0 @@
---
name: ci_delete_archived_trace_metadata
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/500654
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/185876
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/526985
milestone: '17.11'
group: group::ci platform
type: gitlab_com_derisk
default_enabled: false

@@ -1,10 +1,10 @@
---
migration_job_name: BackfillPersonalAccessTokenSevenDaysNotificationSent
description: Backfill seven_days_notification_sent_at column using data from expires_at column in personal_access_tokens table.
description: Backfill seven_days_notification_sent_at column using data from expires_at
column in personal_access_tokens table.
feature_category: system_access
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165592
milestone: '17.5'
queued_migration_version: 20240909222743
# Replace with the approximate date you think it's best to ensure the completion of this BBM.
finalize_after: '2024-10-24'
finalized_by: # version of the migration that finalized this BBM
finalized_by: '20250401231627'

@@ -0,0 +1,21 @@
# frozen_string_literal: true

class FinalizeHkBackfillPersonalAccessTokenSevenDaysNotificationSent < Gitlab::Database::Migration[2.2]
milestone '17.11'

disable_ddl_transaction!

restrict_gitlab_migration gitlab_schema: :gitlab_main

def up
ensure_batched_background_migration_is_finished(
job_class_name: 'BackfillPersonalAccessTokenSevenDaysNotificationSent',
table_name: :personal_access_tokens,
column_name: :id,
job_arguments: [],
finalize: true
)
end

def down; end
end

@@ -0,0 +1 @@
da8dc0be0932b067a4f4ccc62b9de3e24046cb94e837b39b87e0f72bd61a275d

@@ -20,7 +20,7 @@ title: Group integrations API

Use this API to work with external services that integrate with GitLab.

This API requires an access token with the Maintainer or Owner role.
This API requires an access token with at least the Maintainer role.

## List all active integrations

@@ -38175,6 +38175,8 @@ paths:
responses:
'200':
description: Download allowed
'302':
description: Found
'401':
description: Unauthorized
'403':

@@ -14,7 +14,7 @@ title: Project integrations API

Use this API to work with external services that integrate with GitLab.

This API requires an access token with the Maintainer or Owner role.
This API requires an access token with at least the Maintainer role.

## List all active integrations

@@ -465,7 +465,7 @@ active at the same time.

To enable Opsgenie integration:

1. Sign in as a user with the Maintainer or Owner role.
1. Sign in as a user with at least the Maintainer role.
1. Go to **Monitor > Alerts**.
1. In the **Integrations** select box, select **Opsgenie**.
1. Select the **Active** toggle.

@@ -17,6 +17,8 @@ In accordance with [the GitLab Subscription Agreement](https://about.gitlab.com/
GitLab reviews your seat usage and sends you an invoice for any overages.
This review occurs either quarterly (quarterly reconciliation process) or annually (annual true-up process).

To learn more about how GitLab bills GitLab.com users, see [How seat usage is determined](gitlab_com/_index.md#how-seat-usage-is-determined). For GitLab Self-Managed users, see [How GitLab bills for users](self_managed/_index.md#how-gitlab-bills-for-users).

To prevent overages, you can turn on restricted access for [your group](../user/group/manage.md#turn-on-restricted-access)
or [your instance](../administration/settings/sign_up_restrictions.md#turn-on-restricted-access).
This setting restricts groups from adding new billable users when there are no seats left in the subscription.

@@ -92,7 +94,7 @@ You are excluded from quarterly reconciliation if you:
- Are enrolled in a program that provides a Free tier such as the GitLab for Education,
GitLab for Open Source Program, or GitLab for Startups.

If you are excluded from quarterly reconciliation and not on a Free tier, your true-ups are reconciled annually.
If you are excluded from quarterly reconciliation and not on a Free tier, your true-ups are reconciled annually. Alternatively, you can reconcile any overages by [purchasing additional seats](gitlab_com/_index.md#add-seats-to-subscription).

## Troubleshooting

@@ -21,7 +21,7 @@ To configure GitLab Runner to use the GKE:

Before you can configure GitLab Runner to use the GKE you must:

- Have a project where you have the Maintainer or Owner role. If you don't have a project, you can [create it](../../user/project/_index.md).
- Have a project where you have at least the Maintainer role. If you don't have a project, you can [create it](../../user/project/_index.md).
- [Obtain the project runner authentication token](../../ci/runners/runners_scope.md#create-a-project-runner-with-a-runner-authentication-token).
- Install GitLab Runner.

@@ -33,7 +33,7 @@ example project named "Excelsior", and creates a minimal approval workflow for t

## Before you begin

- You must have the Maintainer or Owner role.
- You must have at least the Maintainer role.
- You need a list of managers and their email addresses.
- You need a list of your backend and frontend engineers, and their email addresses.
- You understand [semantic versioning](https://semver.org/) for branch names.

@@ -5,12 +5,12 @@ info: To determine the technical writer assigned to the Stage/Group associated w
title: 'Tutorial: Generate a software bill of materials with GitLab package registry'
---

This tutorial shows you how to generate a software bill of materials (SBOM) in CycloneDX format with a CI/CD pipeline. The pipeline you'll build collects packages across multiple projects in a group, providing you with a comprehensive view of the dependencies in related projects.
This tutorial shows you how to generate a software bill of materials (SBOM) in CycloneDX format with a CI/CD pipeline. The pipeline you'll build collects packages across multiple projects in a group, providing you with a comprehensive view of the dependencies in related projects.

You'll create a virtual Python environment to complete this tutorial, but you can apply the same approach to other supported package types, too.

## What is a software bill of materials?

An SBOM is a machine-readable inventory of all the software components that comprise a software product. The SBOM might include:

- Direct and indirect dependencies

@@ -43,12 +43,12 @@ CycloneDX supports multiple output formats, including JSON, XML, and Protocol Bu

To complete this tutorial, you need:

- A group with the Maintainer or Owner role.
- A group with at least the Maintainer role.
- Access to GitLab CI/CD.
- A configured [GitLab Runner](../../../ci/runners/_index.md#runner-categories) if you're using a GitLab Self-Managed instance. If you're using GitLab.com, you can skip this requirement.
- Optional. A [group deploy token](../../project/deploy_tokens/_index.md) to authenticate requests to the package registry.
- Optional. A [group deploy token](../../project/deploy_tokens/_index.md) to authenticate requests to the package registry.

## Steps
## Steps

This tutorial involves two sets of steps to complete:

@@ -75,10 +75,10 @@ Before implementing this solution, be aware that:

### Add the base pipeline configuration

First, set up the base image that defines
the variables and stages used throughout the pipeline.
First, set up the base image that defines
the variables and stages used throughout the pipeline.

In the following sections, you'll build out
In the following sections, you'll build out
the pipeline by adding the configuration for each stage.

In your project:

@@ -158,45 +158,45 @@ collect_group_packages:
stage: collect
script: |
echo "[]" > "${SBOM_OUTPUT_DIR}/packages.json"

GROUP_PATH_ENCODED=$(echo "${GROUP_PATH}" | sed 's|/|%2F|g')
PACKAGES_URL="${CI_API_V4_URL}/groups/${GROUP_PATH_ENCODED}/packages"

# Optional exclusion list - you can add package types you want to exclude
# EXCLUDE_TYPES="terraform"

page=1
while true; do
# Fetch all packages without specifying type, with pagination
response=$(curl --silent --header "${AUTH_HEADER:-"JOB-TOKEN: $CI_JOB_TOKEN"}" \
"${PACKAGES_URL}?per_page=100&page=${page}")

if ! echo "$response" | jq 'type == "array"' > /dev/null 2>&1; then
echo "Error in API response for page $page"
break
fi

count=$(echo "$response" | jq '. | length')
if [ "$count" -eq 0 ]; then
break
fi

# Filter packages if EXCLUDE_TYPES is set
if [ -n "${EXCLUDE_TYPES:-}" ]; then
filtered_response=$(echo "$response" | jq --arg types "$EXCLUDE_TYPES" '[.[] | select(.package_type | inside($types | split(" ")) | not)]')
response="$filtered_response"
count=$(echo "$response" | jq '. | length')
fi

# Merge this page of results with existing data
jq -s '.[0] + .[1]' "${SBOM_OUTPUT_DIR}/packages.json" <(echo "$response") > "${SBOM_OUTPUT_DIR}/packages.tmp.json"
mv "${SBOM_OUTPUT_DIR}/packages.tmp.json" "${SBOM_OUTPUT_DIR}/packages.json"

# Move to next page if we got a full page of results
if [ "$count" -lt 100 ]; then
break
fi

page=$((page + 1))
done
artifacts:

@@ -240,30 +240,30 @@ aggregate_sboms:
"""Process version information by aggregating packages with same name and type"""
version_history = {}
package_versions = {} # Dict to group packages by name and type

try:
with open(packages_file, 'r') as f:
packages = json.load(f)
if not isinstance(packages, list):
return version_history

# First, group packages by name and type
for package in packages:
key = f"{package.get('name')}:{package.get('package_type')}"
if key not in package_versions:
package_versions[key] = []

package_versions[key].append({
'id': package.get('id'),
'version': package.get('version', 'unknown'),
'created_at': package.get('created_at')
})

# Then process each group to create version history
for package_key, versions in package_versions.items():
# Sort versions by creation date, newest first
versions.sort(key=lambda x: x.get('created_at', ''), reverse=True)

# Use the first package's ID as the key (newest version)
if versions:
package_id = str(versions[0]['id'])

@@ -285,18 +285,18 @@ aggregate_sboms:
'total_packages': 0,
'package_types': {}
}

try:
with open(package_file, 'r') as f:
packages = json.load(f)
if not isinstance(packages, list):
return [], package_stats

for package in packages:
package_stats['total_packages'] += 1
pkg_type = package.get('package_type', 'unknown')
package_stats['package_types'][pkg_type] = package_stats['package_types'].get(pkg_type, 0) + 1

component = {
'type': 'library',
'name': package['name'],

@@ -308,14 +308,14 @@ aggregate_sboms:
'value': package.get('_links', {}).get('web_path', '')
}]
}

key = f"{component['name']}:{component['version']}"
if key not in merged_components:
merged_components[key] = component
except Exception as e:
print(f"Error merging package data: {e}")
return [], package_stats

return list(merged_components.values()), package_stats

# Main processing

@@ -385,16 +385,16 @@ publish_sbom:
stage: publish
script: |
STATS=$(cat "${SBOM_OUTPUT_DIR}/package_stats.json")

# Upload generated files
curl --header "${AUTH_HEADER:-"JOB-TOKEN: $CI_JOB_TOKEN"}" \
--upload-file "${SBOM_OUTPUT_DIR}/merged_sbom.${OUTPUT_TYPE}" \
"${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/sbom/${CI_COMMIT_SHA}/merged_sbom.${OUTPUT_TYPE}"

curl --header "${AUTH_HEADER:-"JOB-TOKEN: $CI_JOB_TOKEN"}" \
--upload-file "${SBOM_OUTPUT_DIR}/package_stats.json" \
"${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/sbom/${CI_COMMIT_SHA}/package_stats.json"

# Add package description
curl --header "${AUTH_HEADER:-"JOB-TOKEN: $CI_JOB_TOKEN"}" \
--header "Content-Type: application/json" \

@@ -429,7 +429,7 @@ To access the generated files:
1. Find the package named `sbom`.
1. Download the SBOM and statistics files.

### Using the SBOM file
### Using the SBOM file

The SBOM file follows the [CycloneDX 1.4 JSON specification](https://cyclonedx.org/docs/1.4/json/), and provides details about published packages, package versions, and artifacts in your group's package registry.

@@ -447,7 +447,7 @@ When working with CycloneDX files, consider using the following tools:

### Using the statistics file

The statistics file provides package registry analytics and activity tracking.
The statistics file provides package registry analytics and activity tracking.

For example, to analyze your package registry, you can:

@@ -463,7 +463,7 @@ To track package registry activity, you can:

You can use a CLI tool like `jq` with the statistics file
to generate analytics or activity information in a readable
JSON format.
JSON format.

The following code block lists several examples of `jq` commands you can run against the statistics file for general analysis or reporting purposes:

@@ -492,7 +492,7 @@ To schedule the pipeline:

1. In your project, go to **Build > Pipeline schedules**.
1. Select **Create a new pipeline schedule** and fill in the form:
- From the **Cron timezone** dropdown list, select a timezone.
- From the **Cron timezone** dropdown list, select a timezone.
- Select an **Interval Pattern**, or add a **Custom** pattern using [cron syntax](../../../ci/pipelines/schedules.md).
- Select the branch or tag for the pipeline.
- Under **Variables**, enter any number of CI/CD variables to the schedule.

@@ -514,7 +514,7 @@ If you encounter authentication errors:

If you're missing package types:

- Make sure your [deploy token has access](../../project/deploy_tokens/_index.md#pull-packages-from-a-package-registry) to all package types.
- Make sure your [deploy token has access](../../project/deploy_tokens/_index.md#pull-packages-from-a-package-registry) to all package types.
- Check if the package type is enabled in your group settings.

### Memory issues in the `aggregate` stage

@@ -175,7 +175,7 @@ Only direct members of a project are imported. Inherited or shared members of a

Prerequisites:

- You must have the Maintainer or Owner role.
- You must have at least the Maintainer role.

If the importing member's role for the target project is:

@@ -122,7 +122,7 @@ For a project that was created by `Group 1`:

Prerequisites:

- You must have the Maintainer or Owner role.
- You must have at least the Maintainer role.
- Sharing the project with other groups must not be prevented.
- You must be a member of the invited group or subgroup.

@@ -382,6 +382,7 @@ module API

desc 'Download the artifacts file for job' do
http_codes [[200, 'Download allowed'],
[302, 'Found'],
[401, 'Unauthorized'],
[403, 'Forbidden'],
[404, 'Artifact not found']]

@@ -26,8 +26,7 @@ module Gitlab

validate_archived_trace unless Gitlab::FIPS.enabled?

trace_metadata.destroy! if trace_metadata.successfully_archived? && \
Feature.enabled?(:ci_delete_archived_trace_metadata, job.project)
trace_metadata.destroy! if trace_metadata.successfully_archived?
end

private

@@ -62,10 +61,11 @@ module Gitlab
def validate_archived_trace
return unless remote_checksum

trace_metadata.update!(remote_checksum: remote_checksum)
trace_metadata.remote_checksum = remote_checksum

unless trace_metadata.remote_checksum_valid?
metrics.increment_error_counter(error_reason: :archive_invalid_checksum)
trace_metadata.save!
end
end

@@ -50939,9 +50939,6 @@ msgstr ""
msgid "Runners|Capacity of 1 enables warm HA through Auto Scaling group re-spawn. Capacity of 2 enables hot HA because the service is available even when a node is lost. Capacity of 3 or more enables hot HA and manual scaling of runner fleet."
msgstr ""

msgid "Runners|Checkbox"
msgstr ""

msgid "Runners|Choose an executor when prompted by the command line. Executors run builds in different environments. %{linkStart}Not sure which one to select?%{linkEnd}"
msgstr ""

@@ -13,6 +13,10 @@ FactoryBot.define do
fetched_objects_count { 1 }
imported_objects_count { 1 }

trait :created do
status { 0 }
end

trait :started do
status { 1 }
end

@@ -70,7 +70,7 @@ describe('RunnerList', () => {
expect(headers[2].findComponent(HelpPopover).exists()).toBe(true);
expect(headers[2].text()).toBe('Owner');

expect(headers[3].text()).toBe(''); // actions has no label
expect(headers[3].text()).toBe('Actions');
});

it('Sets runner id as a row key', () => {

@@ -1532,12 +1532,13 @@ RSpec.describe API::Helpers, feature_category: :shared do
end

it 'redirects to a CDN-fronted URL' do
expect(helper).to receive(:redirect)
expect_next_instance_of(ObjectStorage::CDN::FileUrl) do |instance|
expect(instance).to receive(:url).and_call_original
end

expect(Gitlab::ApplicationContext).to receive(:push).with(artifact: artifact.file.model).and_call_original
expect(Gitlab::ApplicationContext).to receive(:push).with(artifact_used_cdn: false).and_call_original
expect(helper).to receive(:redirect)

subject
end

@@ -1546,9 +1547,9 @@ RSpec.describe API::Helpers, feature_category: :shared do
let(:is_head_request) { true }

it 'redirects to a CDN-fronted URL' do
expect(helper).to receive(:redirect)
expect(ObjectStorage::S3).to receive(:signed_head_url).and_call_original
expect(Gitlab::ApplicationContext).to receive(:push).with(artifact: artifact.file.model).and_call_original
expect(helper).to receive(:redirect)

subject
end

@@ -44,19 +44,6 @@ RSpec.describe Gitlab::Ci::Trace::Archive, feature_category: :scalability do

expect(job.reload.trace_metadata).to be_nil
end

context 'when FF `ci_delete_archived_trace_metadata` is disabled' do
before do
stub_feature_flags(ci_delete_archived_trace_metadata: false)
end

it 'does not delete trace metadata record' do
expect { subject.execute!(stream) }
.not_to change { Ci::BuildTraceMetadata.count }

expect(job.reload.trace_metadata).to eq(trace_metadata)
end
end
end

shared_examples 'local checksum only' do

@@ -305,6 +305,23 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end

describe '.distinct_source_branches' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:mr1) { create(:merge_request, source_branch: 'feature-1', source_project: project) }
let_it_be(:mr2) { create(:merge_request, source_branch: 'feature-2', source_project: project) }
let_it_be(:mr3) { create(:merge_request, source_branch: 'feature-1', target_branch: 'another-branch', source_project: project) }
let_it_be(:mr4) { create(:merge_request, source_branch: 'feature-3', source_project: project) }

it 'returns an array of unique source branch names' do
expect(described_class.distinct_source_branches).to include('feature-1', 'feature-2', 'feature-3')
end

it 'returns only source branch names once even if used in multiple MRs' do
branches = described_class.distinct_source_branches
expect(branches.count('feature-1')).to eq(1)
end
end

describe '.by_sorted_source_branches' do
let(:fork_for_project) { fork_project(project) }

@@ -230,6 +230,29 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
end
end

context 'when pipeline is created' do
let(:pipeline_tracker) do
create(
:bulk_import_tracker,
:created,
entity: entity,
pipeline_name: 'FakePipeline'
)
end

it 'no-ops and returns' do
expect(described_class).not_to receive(:run)

expect(Gitlab::ErrorTracking).to receive(:log_exception)
.with(
instance_of(BulkImports::Pipeline::FailedError),
a_hash_including(tracker_state: 'created')
).and_call_original

worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
end
end

context 'when pipeline is finished' do
let(:pipeline_tracker) do
create(

@@ -243,6 +266,13 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
it 'no-ops and returns' do
expect(described_class).not_to receive(:run)

expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger).to receive(:warn).with(a_hash_including(
message: 'Pipeline in invalid status',
tracker_state: 'finished'
)).and_call_original
end

worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
end
end

@@ -260,6 +290,13 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
it 'no-ops and returns' do
expect(described_class).not_to receive(:run)

expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger).to receive(:warn).with(a_hash_including(
message: 'Pipeline in invalid status',
tracker_state: 'skipped'
)).and_call_original
end

worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
end
end

@@ -277,6 +314,13 @@ RSpec.describe BulkImports::PipelineWorker, feature_category: :importers do
it 'no-ops and returns' do
expect(described_class).not_to receive(:run)

expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger).to receive(:warn).with(a_hash_including(
message: 'Pipeline in invalid status',
tracker_state: 'canceled'
)).and_call_original
end

worker.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
end
end

@@ -2015,8 +2015,10 @@ HCL:
- ".nomad"
- ".tf"
- ".tfvars"
- ".tofu"
- ".workflow"
aliases:
- opentofu
- terraform
ace_mode: ruby
codemirror_mode: ruby