Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-05-21 00:55:32 +00:00
parent 10c8bfaa82
commit e67a224437
65 changed files with 451 additions and 178 deletions

View File

@ -28,9 +28,12 @@ export default {
if (!mergeRequest) return;
const isAssignee = mergeRequest.assignees.nodes.some((u) => u.id === this.currentUserId);
const isAuthor = mergeRequest.author.id === this.currentUserId;
const isReviewer = mergeRequest.reviewers.nodes.some((u) => u.id === this.currentUserId);
if (isAssignee) eventHub.$emit('refetch.mergeRequests', 'assignedMergeRequests');
if (isAssignee || isAuthor)
eventHub.$emit('refetch.mergeRequests', 'authorOrAssigneeMergeRequests');
if (isReviewer) eventHub.$emit('refetch.mergeRequests', 'reviewRequestedMergeRequests');
},
},

View File

@ -2,6 +2,9 @@ subscription currentUserMergeRequestUpdatedSubscription($userId: UserID!) {
userMergeRequestUpdated(userId: $userId) {
... on MergeRequest {
id
author {
id
}
assignees {
nodes {
id

View File

@ -1,5 +1,5 @@
<script>
import { GlFormGroup, GlFormInput, GlFormTextarea } from '@gitlab/ui';
import { GlFormGroup, GlFormInput, GlFormTextarea, GlSprintf } from '@gitlab/ui';
import FormUrlApp from './form_url_app.vue';
import FormCustomHeaders from './form_custom_headers.vue';
@ -9,6 +9,7 @@ export default {
GlFormGroup,
GlFormInput,
GlFormTextarea,
GlSprintf,
FormUrlApp,
FormCustomHeaders,
},
@ -38,11 +39,17 @@ export default {
required: false,
default: '',
},
initialSecretToken: {
type: String,
required: false,
default: '',
},
},
data() {
return {
name: this.initialName,
description: this.initialDescription,
secretToken: this.initialSecretToken,
};
},
};
@ -73,6 +80,32 @@ export default {
</gl-form-group>
<form-url-app :initial-url="initialUrl" :initial-url-variables="initialUrlVariables" />
<gl-form-group :label="s__('Webhooks|Secret token')" label-for="webhook-secret-token">
<template #description>
<gl-sprintf
:message="
s__(
'Webhooks|Used to validate received payloads. Sent with the request in the %{codeStart}X-Gitlab-Token%{codeEnd} HTTP header.',
)
"
>
<template #code="{ content }">
<code>{{ content }}</code>
</template>
</gl-sprintf>
</template>
<gl-form-input
id="webhook-secret-token"
v-model="secretToken"
name="hook[token]"
type="password"
autocomplete="new-password"
class="gl-form-input-xl"
data-testid="webhook-secret-token"
/>
</gl-form-group>
<form-custom-headers :initial-custom-headers="initialCustomHeaders" />
</div>
</template>

View File

@ -14,6 +14,7 @@ export default () => {
description: initialDescription,
url: initialUrl,
urlVariables,
secretToken: initialSecretToken,
customHeaders,
} = el.dataset;
@ -25,6 +26,7 @@ export default () => {
props: {
initialName,
initialDescription,
initialSecretToken,
initialUrl,
initialUrlVariables: JSON.parse(urlVariables),
initialCustomHeaders: JSON.parse(customHeaders),

View File

@ -162,8 +162,8 @@ export default {
});
return selectOptions;
},
workItemsAlphaEnabled() {
return this.glFeatures.workItemsAlpha;
workItemEpicMilestonesEnabled() {
return this.glFeatures.workItemEpicMilestones;
},
isSelectedWorkItemTypeEpic() {
return this.selectedWorkItemType?.name === WORK_ITEM_TYPE_NAME_EPIC;
@ -228,7 +228,7 @@ export default {
return this.widgetsWithExistingDataList.reduce((widgets, item) => {
// Skip adding milestone to widget difference if upgrading to epic
if (
this.workItemsAlphaEnabled &&
this.workItemEpicMilestonesEnabled &&
this.isSelectedWorkItemTypeEpic &&
item.type === WIDGET_TYPE_MILESTONE
) {
@ -244,7 +244,7 @@ export default {
noValuePresentWidgets() {
return this.widgetsWithExistingDataList.reduce((acc, item) => {
if (
this.workItemsAlphaEnabled &&
this.workItemEpicMilestonesEnabled &&
this.isSelectedWorkItemTypeEpic &&
this.milestoneWidget?.projectMilestone &&
item.type === WIDGET_TYPE_MILESTONE

View File

@ -7,7 +7,7 @@ class Groups::MilestonesController < Groups::ApplicationController
before_action :authorize_admin_milestones!, only: [:edit, :new, :create, :update, :destroy]
before_action do
push_force_frontend_feature_flag(:work_items_alpha, !!group&.work_items_alpha_feature_flag_enabled?)
push_force_frontend_feature_flag(:work_item_epic_milestones, !!group&.work_item_epic_milestones_feature_flag_enabled?)
end
feature_category :team_planning

View File

@ -20,7 +20,7 @@ class Projects::MilestonesController < Projects::ApplicationController
before_action :authorize_promote_milestone!, only: :promote
before_action do
push_force_frontend_feature_flag(:work_items_alpha, !!@project&.work_items_alpha_feature_flag_enabled?)
push_force_frontend_feature_flag(:work_item_epic_milestones, !!@project&.work_item_epic_milestones_feature_flag_enabled?)
end
respond_to :html

View File

@ -5,6 +5,7 @@ module HooksHelper
{
name: hook.name,
description: hook.description,
secret_token: hook.masked_token, # always use masked_token to avoid exposing secret_token to frontend
url: hook.url,
url_variables: Gitlab::Json.dump(hook.url_variables.keys.map { { key: _1 } }),
custom_headers: Gitlab::Json.dump(hook.custom_headers.keys.map { { key: _1, value: WebHook::SECRET_MASK } })

View File

@ -295,7 +295,7 @@ module TimeboxesHelper
limit = Milestone::DISPLAY_ISSUES_LIMIT
link_options = { milestone_title: @milestone.title }
if Feature.enabled?(:work_items_alpha, current_user)
if Feature.enabled?(:work_item_epic_milestones, current_user)
message = _('Showing %{limit} of %{total_count} items. ') % { limit: limit, total_count: total_count }
message += link_to(_('View all'), milestones_issues_path(link_options))
else

View File

@ -1186,7 +1186,11 @@ module Ci
end
# Whether this pipeline has completed with artifacts from which a code
# quality report can be generated.
#
# NOTE(review): the diff rendering here interleaves removed and added lines.
# The bare `complete_and_has_reports?` call on the first body line is the
# pre-change implementation (its result is discarded); the feature-flag
# branch below it is the post-change implementation. With
# :show_child_reports_in_mr_page enabled, reports from child (descendant)
# pipelines are also considered.
def can_generate_codequality_reports?
complete_and_has_reports?(Ci::JobArtifact.of_report_type(:codequality))
if Feature.enabled?(:show_child_reports_in_mr_page, project)
complete_and_has_self_or_descendant_reports?(Ci::JobArtifact.of_report_type(:codequality))
else
complete_and_has_reports?(Ci::JobArtifact.of_report_type(:codequality))
end
end
def test_report_summary
@ -1212,9 +1216,17 @@ module Ci
end
def codequality_reports
Gitlab::Ci::Reports::CodequalityReports.new.tap do |codequality_reports|
latest_report_builds(Ci::JobArtifact.of_report_type(:codequality)).each do |build|
build.collect_codequality_reports!(codequality_reports)
if Feature.enabled?(:show_child_reports_in_mr_page, project)
Gitlab::Ci::Reports::CodequalityReports.new.tap do |codequality_reports|
latest_report_builds_in_self_and_project_descendants(Ci::JobArtifact.of_report_type(:codequality)).each do |build|
build.collect_codequality_reports!(codequality_reports)
end
end
else
Gitlab::Ci::Reports::CodequalityReports.new.tap do |codequality_reports|
latest_report_builds(Ci::JobArtifact.of_report_type(:codequality)).each do |build|
build.collect_codequality_reports!(codequality_reports)
end
end
end
end

View File

@ -67,7 +67,7 @@ module Milestoneish
container = project || group
work_items_finder_params[:issue_types] =
container.work_items_alpha_feature_flag_enabled? ? %w[issue epic task] : %w[issue task]
container.work_item_epic_milestones_feature_flag_enabled? ? %w[issue epic task] : %w[issue task]
work_item_ids = ::WorkItems::WorkItemsFinder.new(user, work_items_finder_params)
.execute.preload_associated_models

View File

@ -1068,6 +1068,10 @@ class Group < Namespace
feature_flag_enabled_for_self_or_ancestor?(:work_items_alpha)
end
# True when the :work_item_epic_milestones beta feature flag is enabled for
# this group or any of its ancestor groups.
def work_item_epic_milestones_feature_flag_enabled?
feature_flag_enabled_for_self_or_ancestor?(:work_item_epic_milestones, type: :beta)
end
def work_item_status_feature_available?
feature_flag_enabled_for_self_or_ancestor?(:work_item_status_feature_flag, type: :wip) &&
licensed_feature_available?(:work_item_status)

View File

@ -3382,6 +3382,10 @@ class Project < ApplicationRecord
group&.work_items_alpha_feature_flag_enabled? || Feature.enabled?(:work_items_alpha)
end
# True when the :work_item_epic_milestones beta feature flag is enabled
# either through the project's group hierarchy or globally. Mirrors the
# Group-level predicate of the same name so project and group milestone
# views agree.
def work_item_epic_milestones_feature_flag_enabled?
group&.work_item_epic_milestones_feature_flag_enabled? || Feature.enabled?(:work_item_epic_milestones, type: :beta)
end
def work_item_status_feature_available?
(group&.work_item_status_feature_available? || Feature.enabled?(:work_item_status_feature_flag, type: :wip)) &&
licensed_feature_available?(:work_item_status)

View File

@ -6,7 +6,7 @@
.gl-grow-2
.gl-flex.gl-flex-col.gl-text-default
= title
- if Feature.enabled?(:work_items_alpha, current_user)
- if Feature.enabled?(:work_item_epic_milestones, current_user)
.gl-text-subtle.gl-text-sm
= subtitle
.gl-ml-3.gl-shrink-0.gl-font-bold.gl-whitespace-nowrap.gl-text-subtle

View File

@ -7,7 +7,7 @@
- c.with_body do
= milestone_issues_count_message(@milestone)
- if Feature.enabled?(:work_items_alpha, current_user)
- if Feature.enabled?(:work_item_epic_milestones, current_user)
.row.gl-mt-3
.col-md-4
= render 'shared/milestones/issuables', args.merge(title: s_('Milestones|Unstarted'), subtitle: _('(open and unassigned)'), issuables: issues.opened.unassigned, id: 'unassigned', show_counter: true)

View File

@ -78,7 +78,7 @@
= sprite_icon('issues')
%span= milestone.issues_visible_to_user(current_user).count
.title.hide-collapsed
- if Feature.enabled?(:work_items_alpha, current_user)
- if Feature.enabled?(:work_item_epic_milestones, current_user)
= s_('MilestoneSidebar|Work items')
= gl_badge_tag milestone.sorted_issues(current_user).length, variant: :muted
- else
@ -91,14 +91,14 @@
%span.milestone-stat
= link_to milestones_browse_issuables_path(milestone, type: :issues) do
= s_('MilestoneSidebar|Open:')
- if Feature.enabled?(:work_items_alpha, current_user)
- if Feature.enabled?(:work_item_epic_milestones, current_user)
= milestone.sorted_issues(current_user).opened.length
- else
= milestone.issues_visible_to_user(current_user).opened.count
%span.milestone-stat
= link_to milestones_browse_issuables_path(milestone, type: :issues, state: 'closed') do
= s_('MilestoneSidebar|Closed:')
- if Feature.enabled?(:work_items_alpha, current_user)
- if Feature.enabled?(:work_item_epic_milestones, current_user)
= milestone.sorted_issues(current_user).closed.length
- else
= milestone.issues_visible_to_user(current_user).closed.count

View File

@ -7,7 +7,7 @@
= sprite_icon('chevron-lg-right', size: 12)
= gl_tabs_nav({ class: %w[scrolling-tabs js-milestone-tabs] }) do
= gl_tab_link_to '#tab-issues', item_active: true, data: { endpoint: milestone_tab_path(milestone, 'issues', show_project_name: show_project_name) } do
- if Feature.enabled?(:work_items_alpha, current_user)
- if Feature.enabled?(:work_item_epic_milestones, current_user)
= _('Work items')
= gl_tab_counter_badge milestone.sorted_issues(current_user).length
- else

View File

@ -1,13 +1,6 @@
= form_errors(hook)
.js-vue-webhook-form{ data: webhook_form_data(hook) }
.form-group
= form.label :token, s_('Webhooks|Secret token'), class: 'label-bold'
= form.password_field :token, value: hook.masked_token, autocomplete: 'new-password', class: 'form-control gl-form-input gl-form-input-xl'
%p.form-text.gl-text-subtle
- code_start = '<code>'.html_safe
- code_end = '</code>'.html_safe
= s_('Webhooks|Used to validate received payloads. Sent with the request in the %{code_start}X-Gitlab-Token%{code_end} HTTP header.').html_safe % { code_start: code_start, code_end: code_end }
.form-group
= form.label :url, s_('Webhooks|Trigger'), class: 'label-bold'
%ul.list-unstyled

View File

@ -0,0 +1,10 @@
---
name: work_item_epic_milestones
description: Adds the milestone widget to Epic Work Items
feature_issue_url: https://gitlab.com/groups/gitlab-org/-/epics/329#note_2479555234
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/190104
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/538993
milestone: '18.0'
group: group::product planning
type: beta
default_enabled: false

View File

@ -5,4 +5,4 @@ feature_category: team_planning
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/174405
milestone: '17.7'
queued_migration_version: 20241202142254
finalized_by: # version of the migration that finalized this BBM
finalized_by: '20250518233109'

View File

@ -0,0 +1,8 @@
---
migration_job_name: FixNonExistingTimelogUsers
description: Migrate orphaned timelogs to ghost user
feature_category: team_planning
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/189529
milestone: "18.1"
queued_migration_version: 20250517194243
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
# Enqueues the FixNonExistingTimelogUsers batched background migration,
# which (per the accompanying BBM dictionary entry) migrates orphaned
# timelogs to the ghost user.
class QueueFixNonExistingTimelogUsers < Gitlab::Database::Migration[2.3]
milestone '18.1'
restrict_gitlab_migration gitlab_schema: :gitlab_main
MIGRATION = "FixNonExistingTimelogUsers"
BATCH_SIZE = 1_000
SUB_BATCH_SIZE = 100
# Queues the background migration over the timelogs table, batched by id.
def up
queue_batched_background_migration(
MIGRATION,
:timelogs,
:id,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
# Removes the queued migration so the schema change is reversible.
def down
delete_batched_background_migration(MIGRATION, :timelogs, :id, [])
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
# Finalizes the BackfillResourceWeightEventsNamespaceId batched background
# migration, running any remaining batches inline (finalize: true) so later
# migrations can rely on namespace_id being fully backfilled.
class FinalizeHkBackfillResourceWeightEventsNamespaceId < Gitlab::Database::Migration[2.3]
milestone '18.1'
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
def up
ensure_batched_background_migration_is_finished(
job_class_name: 'BackfillResourceWeightEventsNamespaceId',
table_name: :resource_weight_events,
column_name: :id,
job_arguments: [:namespace_id, :issues, :namespace_id, :issue_id],
finalize: true
)
end
# No-op: finalizing a background migration cannot be undone.
def down; end
end

View File

@ -0,0 +1 @@
17f481d04b92c35ae3b80a0b1b57d6e3f81996405037989ce804cc378e30a751

View File

@ -0,0 +1 @@
9629ddc4620141e4557c2bdc0b6a5a2ba9435bb8bfd71d8005b35b1051df5525

View File

@ -84,6 +84,23 @@ Emergency maintenance is initiated when urgent actions are required on a GitLab
GitLab prioritizes stability and security while minimizing customer impact during emergency maintenance. The specific maintenance procedures follow established internal processes, and all changes undergo appropriate review and approval before they are applied.
GitLab provides advance emergency maintenance notice when possible and sends complete details after the issue is resolved. The GitLab Support team creates a support ticket and notifies all [Switchboard users](create_instance/_index.md#step-1-get-access-to-switchboard) by email.
GitLab provides advance notice when possible and sends complete details
after the issue is resolved. The GitLab Support team:
You cannot postpone emergency maintenance, because the same process must be applied to all GitLab Dedicated instances to ensure their security and availability.
- Creates a support ticket for tracking.
- Sends email notifications only to addresses listed as **Operational email addresses** in the
**Customer communication** section of Switchboard.
- Copies your Customer Success Manager (CSM) on all communications.
You cannot postpone emergency maintenance, because the same process must be applied to all
GitLab Dedicated instances to ensure their security and availability.
### Verify your operational contacts
To ensure you receive maintenance notifications:
1. Sign in to [Switchboard](https://console.gitlab-dedicated.com/).
1. Select your tenant.
1. In the **Customer communication** section, review the email addresses listed under **Operational email addresses**.
To update these contacts, submit a support ticket.

View File

@ -23,8 +23,6 @@ Use SSH keys when you want to:
- Execute SSH commands from the build environment to a remote server.
- Rsync files from the build environment to a remote server.
If any of the above rings a bell, then you most likely need an SSH key.
The most widely supported method is to inject an SSH key into your build
environment by extending your `.gitlab-ci.yml`, and it's a solution that works
with any type of [executor](https://docs.gitlab.com/runner/executors/)
@ -188,8 +186,7 @@ so there's something wrong with the server or the network.
{{< /alert >}}
Now that the `SSH_KNOWN_HOSTS` variable is created, in addition to the
[content of `.gitlab-ci.yml`](#ssh-keys-when-using-the-docker-executor)
above, you must add:
[content of `.gitlab-ci.yml`](#ssh-keys-when-using-the-docker-executor), you must add:
```yaml
before_script:
@ -222,7 +219,7 @@ before_script:
If you do not want to use a file type CI/CD variable, the [example SSH Project](https://gitlab.com/gitlab-examples/ssh-private-key/)
shows an alternative method. This method uses a regular CI/CD variable instead of
the file type variable recommended above.
the file type variable previously recommended.
## Troubleshooting

View File

pipeline configuration or directly in the Git repository in a `Jenkinsfile`.
## Convert Jenkins configuration to GitLab CI/CD
While the examples above are all slightly different, they can all be migrated to GitLab CI/CD
While the previous examples are all slightly different, they can all be migrated to GitLab CI/CD
with the same pipeline configuration.
Prerequisites:
@ -137,7 +137,7 @@ install-JAR:
In this example:
- `stages` defines three stages that run in order. Like the Jenkins examples above,
- `stages` defines three stages that run in order. Like the previous Jenkins examples,
the test job runs first, followed by the build job, and finally the install job.
- `variables` defines [CI/CD variables](../../variables/_index.md) that can be used by all jobs:
- `MAVEN_OPTS` are Maven environment variables needed whenever Maven is executed:
@ -169,7 +169,7 @@ Prerequisites:
This migrated pipeline configuration consists of three global keywords (`stages`, `default`, and `variables`)
followed by 3 jobs. This configuration makes use of additional GitLab CI/CD features
for an improved pipeline compared to the [example above](#convert-jenkins-configuration-to-gitlab-cicd):
for an improved pipeline compared to the [previous example](#convert-jenkins-configuration-to-gitlab-cicd):
```yaml
stages:
@ -209,7 +209,7 @@ install-JAR:
In this example:
- `stages` defines three stages that run in order. Like the Jenkins examples above,
- `stages` defines three stages that run in order. Like the previous Jenkins examples,
the test job runs first, followed by the build job, and finally the install job.
- `default` defines standard configuration to reuse in all jobs by default:
- `image` defines the Docker image container to use and execute commands in. In this example,

View File

@ -37,7 +37,7 @@ A few things that organizations have reported as helping:
- Set and communicate a clear vision of what your migration goals are, which helps
your users understand why the effort is worth it. The value is clear when
the work is done, but people need to be aware while it's in progress too.
- Sponsorship and alignment from the relevant leadership teams helps with the point above.
- Sponsorship and alignment from the relevant leadership teams helps with the previous point.
- Spend time educating your users on what's different, and share this guide
with them.
- Finding ways to sequence or delay parts of the migration can help a lot. Importantly though,

View File

@ -104,7 +104,7 @@ object BuildTest : BuildType({
In GitLab CI/CD, you define jobs with the tasks to execute as part of the pipeline.
Each job can have one or more build steps defined in it.
The equivalent GitLab CI/CD `.gitlab-ci.yml` file for the example above would be:
The equivalent GitLab CI/CD `.gitlab-ci.yml` file for the previous example would be:
```yaml
workflow:

View File

@ -328,7 +328,7 @@ Commit this pipeline configuration to the default branch, and view the pipeline
## Start using merge request pipelines
With the pipeline configurations above, the site deploys every time a pipeline completes
With the previous pipeline configurations, the site deploys every time a pipeline completes
successfully, but this is not an ideal development workflow. It's better to work from
feature branches and merge requests, and only deploy the site when changes merge
to the default branch.

View File

@ -681,7 +681,7 @@ script:
- ls -al cache/
```
The configuration above results in `git fetch` being called this way:
The previous configuration results in `git fetch` being called this way:
```shell
git fetch origin $REFSPECS --depth 50 --prune
@ -752,7 +752,7 @@ script:
- ls -al .git/modules/
```
The configuration above results in `git submodule update` being called this way:
The previous configuration results in `git submodule update` being called this way:
```shell
git submodule update --init --depth 50 --recursive --remote --jobs 4
@ -1132,7 +1132,7 @@ can be controlled with `FASTZIP_ARCHIVER_BUFFER_SIZE`. The default size for this
concurrency of 16 allocates 32 MiB. Data that exceeds the buffer size is written to and read back from disk.
Therefore, using no buffer, `FASTZIP_ARCHIVER_BUFFER_SIZE: 0`, and only scratch space is a valid option.
`FASTZIP_ARCHIVER_CONCURRENCY` controls how many files are compressed concurrency. As mentioned above, this setting
`FASTZIP_ARCHIVER_CONCURRENCY` controls how many files are compressed concurrently. As previously mentioned, this setting
therefore can increase how much memory is being used. It can also increase the temporary data written to the scratch space.
The default is the number of CPUs available, but given the memory ramifications, this may not always be the best
setting.

View File

@ -74,7 +74,7 @@ your `.gitmodules` file:
url = ../../project.git
```
The above configuration instructs Git to automatically deduce the URL to
The previous configuration instructs Git to automatically deduce the URL to
use when cloning sources. You can clone with HTTPS in all your CI/CD jobs, and you
can continue to use SSH to clone locally.

View File

@ -294,8 +294,8 @@ However, both of them would be added to the job's container with the `mysql` ali
the [default hostname naming](#accessing-the-services). This would end with one
of the services not being accessible.
After the new extended Docker configuration options, the above example would
look like:
After the new extended Docker configuration options, the previous example would
look like this:
```yaml
services:
@ -496,7 +496,7 @@ For this solution to work, you must:
- Use [the networking mode that creates a new network for each job](https://docs.gitlab.com/runner/executors/docker.html#create-a-network-for-each-job).
- [Not use the Docker executor with Docker socket binding](../docker/using_docker_build.md#use-the-docker-executor-with-docker-socket-binding).
If you must, then in the above example, instead of `host`, use the dynamic network name created for this job.
If you must, then in the previous example, instead of `host`, use the dynamic network name created for this job.
## How Docker integration works
@ -505,7 +505,7 @@ time.
1. Create any service container: `mysql`, `postgresql`, `mongodb`, `redis`.
1. Create a cache container to store all volumes as defined in `config.toml` and
`Dockerfile` of build image (`ruby:2.6` as in above example).
`Dockerfile` of build image (`ruby:2.6` as in the previous examples).
1. Create a build container and link any service container to build container.
1. Start the build container, and send a job script to the container.
1. Run the job script.
@ -598,7 +598,7 @@ Finally, create a build container by executing the `build_script` file we create
docker run --name build -i --link=service-redis:redis golang:latest /bin/bash < build_script
```
The above command creates a container named `build` that is spawned from the `golang:latest` image and has one service
The previous command creates a container named `build` that is spawned from the `golang:latest` image and has one service
linked to it. The `build_script` is piped using `stdin` to the bash interpreter which in turn executes the
`build_script` in the `build` container.

View File

@ -2175,8 +2175,8 @@ In this example:
1. GitLab checks the job log for a match with the regular expression. A line
like `Code coverage: 67.89% of lines covered` would match.
1. GitLab then checks the matched fragment to find a match to `\d+(?:\.\d+)?`.
The sample matching line above gives a code coverage of `67.89`.
1. GitLab then checks the matched fragment to find a match to the regular expression: `\d+(?:\.\d+)?`.
The sample regex can match a code coverage of `67.89`.
**Additional details**:
@ -2642,7 +2642,7 @@ rubocop:
- You can use multiple parents for `extends`.
- The `extends` keyword supports up to eleven levels of inheritance, but you should
avoid using more than three levels.
- In the example above, `.tests` is a [hidden job](../jobs/_index.md#hide-a-job),
- In the previous example, `.tests` is a [hidden job](../jobs/_index.md#hide-a-job),
but you can extend configuration from regular jobs as well.
**Related topics**:
@ -3611,7 +3611,7 @@ linux:rspec:
script: echo "Running rspec on linux..."
```
The above example generates the following jobs:
The previous example generates the following jobs:
```plaintext
linux:build: [aws, monitoring]
@ -3630,7 +3630,7 @@ The `linux:rspec` job runs as soon as the `linux:build: [aws, app1]` job finishe
- The order of the matrix variables in `needs:parallel:matrix` must match the order
of the matrix variables in the needed job. For example, reversing the order of
the variables in the `linux:rspec` job in the earlier example above would be invalid:
the variables in the `linux:rspec` job in the previous example would be invalid:
```yaml
linux:rspec:
@ -4634,7 +4634,7 @@ any subkeys. All additional details and related topics are the same.
**Supported values**:
- Same as `rules:changes` above.
- Same as `rules:changes`.
**Example of `rules:changes:paths`**:

View File

@ -161,7 +161,7 @@ job:
echo "Third command line."
```
The example above renders in the job log as:
The previous example renders in the job log as:
```shell
$ echo First command line # collapsed multiline command
@ -195,7 +195,7 @@ job:
echo "Second command line."
```
Both examples above render in the job log as:
The previous two examples render in the job log as:
```shell
$ echo First command line is split over two lines. # collapsed multiline command
@ -248,7 +248,7 @@ job:
You can define the color codes in Shell environment variables, or even [CI/CD variables](../variables/_index.md#define-a-cicd-variable-in-the-gitlab-ciyml-file),
which makes the commands easier to read and reusable.
For example, using the same example as above and environment variables defined in a `before_script`:
For example, using the previous example and environment variables defined in a `before_script`:
```yaml
job:

View File

@ -108,8 +108,7 @@ To learn more about verifying signed images/artifacts, see [Cosign Verifying doc
#### Container images
The example below demonstrates how to verify a signed container image in GitLab CI. The command-line arguments are
described [above](#verification).
The following example demonstrates how to verify a signed container image in GitLab CI. Use the previously described [command-line arguments](#verification).
```yaml
verify_image:
@ -130,8 +129,7 @@ verify_image:
#### Build artifacts
The example below demonstrates how to verify a signed build artifact in GitLab CI. Verifying an artifact requires both
the artifact itself and the `cosign.bundle` file produced by `cosign sign-blob`. The command-line arguments are
described [above](#verification).
the artifact itself and the `cosign.bundle` file produced by `cosign sign-blob`. Use the previously described [command-line arguments](#verification).
```yaml
verify_artifact:
@ -175,7 +173,7 @@ needed to make it safer to distribute and use open source software.
### Generating provenance in GitLab CI/CD
Now that Sigstore supports GitLab OIDC as described above, you can use npm provenance together with GitLab CI/CD and Sigstore to
Now that Sigstore supports GitLab OIDC as previously described, you can use npm provenance together with GitLab CI/CD and Sigstore to
generate and sign provenance for your npm packages in a GitLab CI/CD pipeline.
#### Prerequisites

View File

@ -167,7 +167,7 @@ build-job:
The `workflow:rules` templates were [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/456394)
in GitLab 17.0 and are planned for removal in 18.0. This change is a breaking change.
To configure `workflow:rules` in your pipeline, add the keyword explicitly. See the examples above for options.
To configure `workflow:rules` in your pipeline, add the keyword explicitly. See the previous examples for options.
{{< /alert >}}

View File

@ -12,91 +12,28 @@ How to set up the local development environment to run [GitLab Duo Workflow](../
- [GitLab Ultimate license](https://handbook.gitlab.com/handbook/engineering/developer-onboarding/#working-on-gitlab-ee-developer-licenses)
- [Vertex access](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/main/doc/howto/gitlab_ai_gateway.md#use-the-existing-project): You need access to the `ai-enablement-dev-69497ba7` project in GCP because GDK by default uses Anthropic hosted on Vertex. Access to this project should be available to all engineers at GitLab.
- If you do not have Vertex access for any reason, you should unset `DUO_WORKFLOW__VERTEX_PROJECT_ID` in the Duo Workflow Service and set `ANTHROPIC_API_KEY` to a regular Anthropic API key
- Various settings and feature flags, which are enabled for you by the [GDK setup script](#gdk-setup)
- Various settings and feature flags, which are enabled for you by the [GDK setup script](#development-setup-for-backend-components)
## Set up local development for Workflow
Workflow consists of four separate services:
1. [GitLab instance](https://gitlab.com/gitlab-org/gitlab/)
1. [GitLab Duo Workflow Service](https://gitlab.com/gitlab-org/duo-workflow/duo-workflow-service)
1. GitLab Duo Workflow Service, which is part of the [GitLab AI Gateway](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/blob/main/docs/duo_workflow_service.md?ref_type=heads)
1. [GitLab Duo Workflow Executor](https://gitlab.com/gitlab-org/duo-workflow/duo-workflow-executor/)
1. [GitLab Duo Workflow Webview](https://gitlab.com/gitlab-org/editor-extensions/gitlab-lsp/-/blob/main/packages/webview_duo_workflow/README.md)
### GDK Setup
### Development Setup for Backend Components
You should [set up GitLab Duo Workflow with the GitLab Development Kit (GDK)](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/main/doc/howto/duo_workflow.md)
to run local versions of GitLab, Duo Workflow Service, and Executor.
This setup can be used with the [publicly available version of the VS Code Extension](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow).
Follow [these instructions](#gitlab-duo-workflow-ui-in-visual-studio-code-vs-code) to see the GitLab Duo Workflow UI local build in VS Code. A local build is required if you are making VS Code changes or need to use an unreleased version.
This setup can be used as-is with the [publicly available version of the VS Code Extension](https://marketplace.visualstudio.com/items?itemName=GitLab.gitlab-workflow).
### Manual Setup
### Development Setup for Frontend Components
#### GitLab Duo Workflow UI in Visual Studio Code (VS Code)
There is no need to set up the backend components of Duo Workflow to test changes for the GitLab Duo Workflow UI.
There is no need for the GDK, Workflow service or Workflow executor local build to test the GitLab Duo Workflow UI.
Only set these up if you are making changes to one of them and need to test their integration with the GitLab Duo Workflow UI.
A local build of the UI is required if you are making Duo Workflow UI changes that you need to view locally. A local build is also required if you want to use a version of the UI that has not been released yet.
Refer to the [GitLab Duo Workflow README](https://gitlab.com/gitlab-org/editor-extensions/gitlab-lsp/-/blob/main/packages/webview_duo_workflow/README.md) file in the Language Server project to get started with local development of GitLab Duo Workflow UI.
#### Set up your local GitLab instance
1. Configure the GitLab Duo Workflow Service URL in your local GitLab instance by updating the `config/gitlab.yml` file:
```dotenv
development:
duo_workflow:
service_url: 0.0.0.0:50052
secure: false
```
1. Restart the GitLab instance.
```shell
gdk restart rails
```
1. In your local GitLab instance, enable the `duo_workflow` feature flag from the Rails console:
```ruby
Feature.enable(:duo_workflow)
```
1. Set up [GitLab Runner with GDK](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/main/doc/howto/runner.md) so you can create CI jobs locally to test Workflow.
1. Create a [personal access token](../../user/profile/personal_access_tokens.md) in your local GitLab instance with the `api` scope. Save this value and use it in the next step.
1. Run GDK with an Ultimate license.
1. If you're running GitLab in SaaS mode, you'll need to turn on the `beta and experimental features` functionality, as they are [turned off by default](../../user/gitlab_duo/turn_on_off.md#turn-on-beta-and-experimental-features). In the group settings for the project you'll run workflow against, ensure that the `Use experiment and beta Duo features` checkbox is checked.
1. Manually create a Workflow using the following `curl` request; the output will be a workflow ID that is referred to as `$WORKFLOW_ID` throughout the rest of these docs:
```shell
curl POST --verbose \
--header "Authorization: Bearer $YOUR_GITLAB_PAT" \
--header 'Content-Type: application/json' \
--data '{
"project_id": "$PROJECT_ID_FOR_RUNNING_WORKFLOW_AGAINST"
}' \
$YOUR_GDK_ROOT_URL/api/v4/ai/duo_workflows/workflows
```
#### Set up the GitLab Duo Workflow Service and Executor
Refer to the [GitLab Duo Workflow Service README](https://gitlab.com/gitlab-org/duo-workflow/duo-workflow-service) and [GitLab Duo Workflow Executor](https://gitlab.com/gitlab-org/duo-workflow/duo-workflow-executor/) to set them up individually.
## Troubleshooting
### Issues connecting to 50052 port
JAMF may be listening on the `50052` port which will conflict with GitLab Duo Workflow Service.
```shell
$ sudo lsof -i -P | grep LISTEN | grep :50052
jamfRemot <redacted> root 11u IPv4 <redacted> 0t0 TCP localhost:50052 (LISTEN)
```
To work around this, run the server on 50053 with:
```shell
PORT=50053 poetry run duo-workflow-service
```

View File

@ -133,7 +133,7 @@ For multi-node architectures, the Bitbucket provider configuration must also be
## Bitbucket project import
After the above configuration is set up, you can use Bitbucket to sign in to
After the previous configuration is set up, you can use Bitbucket to sign in to
GitLab and [start importing your projects](../user/project/import/bitbucket.md).
If you want to import projects from Bitbucket, but don't want to enable signing in,

View File

@ -62,7 +62,7 @@ To see your new project in the list, refresh the page.
1. Go to the [Google API Console](https://console.developers.google.com/apis/dashboard).
1. Select **ENABLE APIS AND SERVICES** at the top of the page.
1. Find each of the above APIs. On the page for the API, select **ENABLE**.
1. Find each of the APIs mentioned previously. On the page for the API, select **ENABLE**.
It may take a few minutes for the API to be fully functional.
## Configure the GitLab server

View File

@ -39,7 +39,7 @@ system's Kerberos settings.
If your GitLab server is `gitlab.example.com` and your Kerberos realm
`EXAMPLE.COM`, create a Service Principal `HTTP/gitlab.example.com@EXAMPLE.COM`
in your Kerberos database.
1. Create a keytab on the GitLab server for the above Service Principal. For example,
1. Create a keytab on the GitLab server for the Service Principal. For example,
`/etc/http.keytab`.
The keytab is a sensitive file and must be readable by the GitLab user. Set

View File

@ -96,7 +96,7 @@ GitLab server do not match. Often, Windows clients work in this case while
Linux clients fail. They use reverse DNS while detecting the Kerberos
realm. If they get the wrong realm then ordinary Kerberos mechanisms fail,
so the client falls back to attempting to negotiate `IAKERB`, leading to the
above error message.
previous authentication error message.
To fix this, ensure that the forward and reverse DNS for your GitLab server
match. So for instance, if you access GitLab as `gitlab.example.com`, resolving
@ -116,7 +116,7 @@ remote: HTTP Basic: Access denied
fatal: Authentication failed for '<KRB5 path>'
```
If you are using Git v2.11 or newer and see the above error when cloning, you can
If you are using Git v2.11 or later and see the previous error when cloning, you can
set the `http.emptyAuth` Git option to `true` to fix this:
```shell

View File

@ -149,7 +149,7 @@ http://mattermost.example.com/login/gitlab/complete
Make sure to select the **Trusted** and **Confidential** settings. Under **Scopes**, select `read_user`. Then, choose **Save application**.
Once the application is created you are provided with an `Application ID` and `Secret`. One other piece of information needed is the URL of the GitLab instance.
Return to the server running GitLab Mattermost and edit the `/etc/gitlab/gitlab.rb` configuration file as follows using the values you received above:
Return to the server running GitLab Mattermost and edit the `/etc/gitlab/gitlab.rb` configuration file as follows using the values you previously received:
```ruby
mattermost['gitlab_enable'] = true

View File

@ -56,4 +56,4 @@ requires both Snowflake and GitLab configuration.
1. Sign in to Snowflake.
1. Select **Data > Databases**.
1. Select the warehouse configured above.
1. Select the warehouse previously configured.

View File

@ -285,7 +285,7 @@ positives.
| `CS_REGISTRY_INSECURE` | `"false"` | Allow access to insecure registries (HTTP only). Should only be set to `true` when testing the image locally. Works with all scanners, but the registry must listen on port `80/tcp` for Trivy to work. |
| `CS_REGISTRY_PASSWORD` | `$CI_REGISTRY_PASSWORD` | Password for accessing a Docker registry requiring authentication. The default is only set if `$CS_IMAGE` resides at [`$CI_REGISTRY`](../../../ci/variables/predefined_variables.md). Not supported when FIPS mode is enabled. |
| `CS_REGISTRY_USER` | `$CI_REGISTRY_USER` | Username for accessing a Docker registry requiring authentication. The default is only set if `$CS_IMAGE` resides at [`$CI_REGISTRY`](../../../ci/variables/predefined_variables.md). Not supported when FIPS mode is enabled. |
| `CS_SEVERITY_THRESHOLD` | `UNKNOWN` | Severity level threshold. The scanner outputs vulnerabilities with severity level higher than or equal to this threshold. Supported levels are `UNKNOWN`, `LOW`, `MEDIUM`, `HIGH`, and `CRITICAL`. {{< icon name="warning" >}} **[Default value changed to `MEDIUM`](https://gitlab.com/gitlab-org/gitlab/-/issues/439782)** in GitLab 17.8. |
| `CS_SEVERITY_THRESHOLD` | `UNKNOWN` | Severity level threshold. The scanner outputs vulnerabilities with severity level higher than or equal to this threshold. Supported levels are `UNKNOWN`, `LOW`, `MEDIUM`, `HIGH`, and `CRITICAL`. |
| `CS_TRIVY_JAVA_DB` | `"registry.gitlab.com/gitlab-org/security-products/dependencies/trivy-java-db"` | Specify an alternate location for the [trivy-java-db](https://github.com/aquasecurity/trivy-java-db) vulnerability database. |
| `CS_TRIVY_DETECTION_PRIORITY` | `"precise"` | Scan using the defined Trivy [detection priority](https://trivy.dev/latest/docs/scanner/vulnerability/#detection-priority). The following values are allowed: `precise` or `comprehensive`. |
| `SECURE_LOG_LEVEL` | `info` | Set the minimum logging level. Messages of this logging level or higher are output. From highest to lowest severity, the logging levels are: `fatal`, `error`, `warn`, `info`, `debug`. |

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Batched background migration that repairs timelogs whose user_id points
    # at a user row that no longer exists, by reassigning them to the
    # instance-wide "ghost" user (the placeholder account that owns content
    # left behind by deleted users).
    class FixNonExistingTimelogUsers < BatchedMigrationJob
      # Matches the ghost value of User#user_type; hard-coded so the
      # migration does not depend on application enums that may change.
      GHOST_USER_TYPE = 5

      operation_name :fix_non_existing_timelog_users
      feature_category :team_planning

      # Reassigns orphaned timelogs in each sub-batch to the ghost user.
      #
      # NOTE(review): assumes a ghost user always exists —
      # `find_by(...).id` raises NoMethodError otherwise; confirm every
      # instance is guaranteed to have one before this job is scheduled.
      def perform
        # Looked up once per job run, outside the sub-batch loop.
        ghost_id = User.find_by(user_type: GHOST_USER_TYPE).id

        each_sub_batch do |sub_batch|
          # Min/max ids bound the raw UPDATE to exactly this sub-batch.
          first, last = sub_batch.pick(Arel.sql('min(id), max(id)'))

          # Raw SQL (interpolating integer ids only, so no injection risk)
          # lets the anti-join on users run server-side in one statement.
          query = <<~SQL
            UPDATE timelogs
            SET user_id = #{ghost_id}
            WHERE timelogs.id BETWEEN #{first} AND #{last}
            AND NOT EXISTS (SELECT 1 FROM users WHERE users.id = timelogs.user_id)
          SQL

          sub_batch.connection.execute(query)
        end
      end
    end
  end
end

View File

@ -5374,6 +5374,9 @@ msgstr ""
msgid "AdminUsers|There are no more seats left in your subscription. New users cannot be approved for this instance."
msgstr ""
msgid "AdminUsers|This user's access level is managed with LDAP. Remove user's mapping or change group's role in %{linkStart}LDAP synchronization%{linkEnd} to modify access."
msgstr ""
msgid "AdminUsers|To confirm, type %{name}"
msgstr ""
@ -68503,7 +68506,7 @@ msgstr ""
msgid "Webhooks|URL preview"
msgstr ""
msgid "Webhooks|Used to validate received payloads. Sent with the request in the %{code_start}X-Gitlab-Token%{code_end} HTTP header."
msgid "Webhooks|Used to validate received payloads. Sent with the request in the %{codeStart}X-Gitlab-Token%{codeEnd} HTTP header."
msgstr ""
msgid "Webhooks|Vulnerability events"

View File

@ -3,7 +3,7 @@
source 'https://rubygems.org'
gem 'gitlab-qa', '~> 15', '>= 15.5.0', require: 'gitlab/qa'
gem 'gitlab_quality-test_tooling', '~> 2.10.0', require: false
gem 'gitlab_quality-test_tooling', '~> 2.11.0', require: false
gem 'gitlab-utils', path: '../gems/gitlab-utils'
gem 'activesupport', '~> 7.1.5.1' # This should stay in sync with the root's Gemfile
gem 'allure-rspec', '~> 2.26.0'

View File

@ -139,7 +139,7 @@ GEM
rainbow (>= 3, < 4)
table_print (= 1.5.7)
zeitwerk (>= 2, < 3)
gitlab_quality-test_tooling (2.10.0)
gitlab_quality-test_tooling (2.11.0)
activesupport (>= 7.0, < 7.3)
amatch (~> 0.4.1)
fog-google (~> 1.24, >= 1.24.1)
@ -382,7 +382,7 @@ DEPENDENCIES
gitlab-orchestrator!
gitlab-qa (~> 15, >= 15.5.0)
gitlab-utils!
gitlab_quality-test_tooling (~> 2.10.0)
gitlab_quality-test_tooling (~> 2.11.0)
googleauth (~> 1.9.0)
influxdb-client (~> 3.2)
junit_merge (~> 0.1.2)

View File

@ -324,6 +324,9 @@ module InternalEventsCli
@metrics.map.with_index do |metric, idx|
new_page!(on_step: 'Save files', steps: STEPS) # Repeat the same step but increment metric counter
cli.say show_all_metric_paths(metric)
cli.say "\n"
cli.say format_prompt(format_subheader('SAVING FILE', metric.description, idx, @metrics.length))
cli.say "\n"
@ -331,6 +334,20 @@ module InternalEventsCli
end
end
def show_all_metric_paths(metric)
time_frames = metric.time_frame.value
return unless time_frames.is_a?(Array) && time_frames.length > 1
cli.say <<~TEXT
#{format_info "This would create #{time_frames.length} metrics with the following key paths:"}
#{time_frames.map do |time_frame|
"#{TimeFramedKeyPath::METRIC_TIME_FRAME_DESC[time_frame]}: #{format_info(TimeFramedKeyPath.build(metric.key_path, time_frame))}" # {' '}
end.join("\n")}
TEXT
end
def prompt_for_next_steps(outcomes = [])
new_page!

View File

@ -188,16 +188,9 @@ module InternalEventsCli
class Metric
TimeFrame = Struct.new(:value) do
def description
case value
when Array
nil # array time_frame metrics have no description prefix
when '7d'
'weekly'
when '28d'
'monthly'
when 'all'
'total'
end
return if value.is_a? Array # array time_frame metrics have no description prefix
TimeFramedKeyPath::METRIC_TIME_FRAME_DESC[value]
end
def directory_name

View File

@ -1,17 +1,19 @@
# frozen_string_literal: true
# Helpers for shared & state across all CLI flows
# Helpers for shared & state across all CLI flows
module InternalEventsCli
class TimeFramedKeyPath
METRIC_TIME_FRAME_SUFFIX = {
'7d' => '_weekly',
'28d' => '_monthly',
'all' => ''
METRIC_TIME_FRAME_DESC = {
'7d' => 'weekly',
'28d' => 'monthly',
'all' => 'total'
}.freeze
def self.build(base_key_path, time_frame)
# copy logic of Gitlab::Usage::MetricDefinition
"#{base_key_path}#{METRIC_TIME_FRAME_SUFFIX[time_frame]}"
return base_key_path if time_frame == 'all'
"#{base_key_path}_#{METRIC_TIME_FRAME_DESC[time_frame]}"
end
end
end

View File

@ -35,7 +35,7 @@ RSpec.describe "User creates milestone", :js, feature_category: :team_planning d
before do
sign_in(user)
stub_feature_flags(work_items_alpha: false)
stub_feature_flags(work_item_epic_milestones: false)
visit(new_project_milestone_path(project))
end

View File

@ -71,7 +71,7 @@ RSpec.describe "User views milestone", feature_category: :team_planning do
context 'when issues on milestone are over DISPLAY_ISSUES_LIMIT' do
it "limits issues to display and shows warning" do
stub_feature_flags(work_items_alpha: false)
stub_feature_flags(work_item_epic_milestones: false)
stub_const('Milestoneish::DISPLAY_ISSUES_LIMIT', 3)
visit(project_milestone_path(project, milestone))
@ -82,9 +82,9 @@ RSpec.describe "User views milestone", feature_category: :team_planning do
end
end
context 'when issues on milestone are over DISPLAY_ISSUES_LIMIT and work_items_alpha FF is on' do
context 'when issues on milestone are over DISPLAY_ISSUES_LIMIT and work_item_epic_milestones FF is on' do
it "limits issues to display and shows warning" do
stub_feature_flags(work_items_alpha: true)
stub_feature_flags(work_item_epic_milestones: true)
stub_const('Milestoneish::DISPLAY_ISSUES_LIMIT', 3)
visit(project_milestone_path(project, milestone))

View File

@ -22,7 +22,7 @@ RSpec.describe 'Project milestone', :js, feature_category: :team_planning do
before do
sign_in(user)
stub_feature_flags(work_items_alpha: false)
stub_feature_flags(work_item_epic_milestones: false)
end
context 'when project has enabled issues' do

View File

@ -219,12 +219,17 @@ describe('Merge requests app component', () => {
},
],
},
author: { id: 'gid://gitlab/User/1' },
reviewers: { nodes: [] },
},
},
});
expect(eventHub.$emit).toHaveBeenCalledWith('refetch.mergeRequests', 'assignedMergeRequests');
expect(eventHub.$emit).toHaveBeenCalledWith(
'refetch.mergeRequests',
'authorOrAssigneeMergeRequests',
);
});
it('emits refetch.mergeRequests with assignedMergeRequests when current user is a reviewer', async () => {
@ -243,6 +248,7 @@ describe('Merge requests app component', () => {
},
],
},
author: { id: 'gid://gitlab/User/1' },
assignees: { nodes: [] },
},
},

View File

@ -17,6 +17,7 @@ describe('WebhookFormApp', () => {
const findNameInput = () => wrapper.findByTestId('webhook-name');
const findDescriptionInput = () => wrapper.findByTestId('webhook-description');
const findSecretTokenInput = () => wrapper.findByTestId('webhook-secret-token');
const findFormUrlApp = () => wrapper.findComponent(FormUrlApp);
const findFormCustomHeaders = () => wrapper.findComponent(FormCustomHeaders);
@ -63,6 +64,25 @@ describe('WebhookFormApp', () => {
});
});
describe('secret token input', () => {
it('renders secret token input', () => {
expect(findSecretTokenInput().attributes('name')).toBe('hook[token]');
expect(findSecretTokenInput().props('value')).toBe('');
});
it('uses initialSecretToken as initial value', () => {
const initialSecretToken = '************';
createComponent({
props: {
initialSecretToken,
},
});
expect(findSecretTokenInput().props('value')).toBe(initialSecretToken);
});
});
describe('FormUrlApp component', () => {
it('renders FormUrlApp', () => {
expect(findFormUrlApp().props()).toMatchObject({

View File

@ -73,7 +73,7 @@ describe('WorkItemChangeTypeModal component', () => {
const createComponent = ({
hasParent = false,
hasChildren = false,
workItemsAlpha = false,
workItemEpicMilestones = false,
widgets = [],
workItemType = WORK_ITEM_TYPE_NAME_TASK,
convertWorkItemMutationHandler = convertWorkItemMutationSuccessHandler,
@ -100,7 +100,7 @@ describe('WorkItemChangeTypeModal component', () => {
},
provide: {
glFeatures: {
workItemsAlpha,
workItemEpicMilestones,
},
hasSubepicsFeature,
},
@ -232,7 +232,7 @@ describe('WorkItemChangeTypeModal component', () => {
createComponent({
workItemType: WORK_ITEM_TYPE_NAME_ISSUE,
widgets: [workItemChangeTypeWidgets.MILESTONE],
workItemsAlpha: true,
workItemEpicMilestones: true,
allowedConversionTypesEE,
});

View File

@ -13,9 +13,10 @@ RSpec.describe HooksHelper, feature_category: :integrations do
context 'when there are no URL variables' do
it 'returns proper data' do
expect(subject).to match(
is_expected.to match(
name: project_hook.name,
description: project_hook.description,
secret_token: nil,
url: project_hook.url,
url_variables: "[]",
custom_headers: "[]"
@ -24,12 +25,13 @@ RSpec.describe HooksHelper, feature_category: :integrations do
end
context 'when there are URL variables' do
let(:project_hook) { build_stubbed(:project_hook, :url_variables, project: project) }
let(:project_hook) { build_stubbed(:project_hook, :url_variables, :token, project: project) }
it 'returns proper data' do
expect(subject).to match(
is_expected.to match(
name: project_hook.name,
description: project_hook.description,
secret_token: WebHook::SECRET_MASK,
url: project_hook.url,
url_variables: Gitlab::Json.dump([{ key: 'abc' }, { key: 'def' }]),
custom_headers: "[]"
@ -38,12 +40,13 @@ RSpec.describe HooksHelper, feature_category: :integrations do
end
context 'when there are custom headers' do
let(:project_hook) { build_stubbed(:project_hook, project: project, custom_headers: { test: 'blub' }) }
let(:project_hook) { build_stubbed(:project_hook, :token, project: project, custom_headers: { test: 'blub' }) }
it 'returns proper data' do
expect(subject).to match(
is_expected.to match(
name: project_hook.name,
description: project_hook.description,
secret_token: WebHook::SECRET_MASK,
url: project_hook.url,
url_variables: "[]",
custom_headers: Gitlab::Json.dump([{ key: 'test', value: WebHook::SECRET_MASK }])

View File

@ -0,0 +1,44 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::FixNonExistingTimelogUsers, feature_category: :team_planning do
  let(:users_table) { table(:users) }
  let(:timelogs_table) { table(:timelogs) }

  # user_type: 5 matches the migration's GHOST_USER_TYPE constant.
  let!(:ghost) { users_table.create!(user_type: 5, projects_limit: 0) }
  let(:user) { users_table.create!(email: generate(:email), projects_limit: 0) }
  let(:deleted_user) { users_table.create!(email: generate(:email), projects_limit: 0) }

  # Timelog that becomes orphaned once deleted_user is removed in the example.
  let!(:timelog_invalid) do
    timelogs_table.create!(user_id: deleted_user.id, time_spent: 5)
  end

  # Control record whose owner keeps existing; must be left untouched.
  let!(:timelog_valid) do
    timelogs_table.create!(user_id: user.id, time_spent: 5)
  end

  let(:start_id) { timelogs_table.minimum(:id) }
  let(:end_id) { timelogs_table.maximum(:id) }

  describe '#perform' do
    it 'migrates the invalid timelog' do
      # Sanity check before the owner row is deleted.
      expect(timelog_invalid.reload.user_id).to eq(deleted_user.id)

      # `delete` (not `destroy`) skips callbacks, leaving the timelog row
      # pointing at a user id that no longer exists.
      deleted_user.delete

      described_class.new(
        start_id: start_id,
        end_id: end_id,
        batch_table: :timelogs,
        batch_column: :id,
        sub_batch_size: 2,
        pause_ms: 0,
        connection: ApplicationRecord.connection
      ).perform

      # Orphaned timelog is reassigned to the ghost user; valid one is not.
      expect(timelog_invalid.reload.user_id).to eq(ghost.id)
      expect(timelog_valid.reload.user_id).to eq(user.id)
    end
  end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueFixNonExistingTimelogUsers, migration: :gitlab_main, feature_category: :team_planning do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    # reversible_migration exercises both directions: nothing may be
    # scheduled before `up` runs, and rolling back must deschedule the job.
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        # Batch sizes are asserted via the migration's own constants so the
        # spec stays in sync if they are tuned later.
        expect(batched_migration).to have_scheduled_batched_migration(
          gitlab_schema: :gitlab_main,
          table_name: :timelogs,
          column_name: :id,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE
        )
      }
    end
  end
end

View File

@ -4817,9 +4817,23 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
create(:ci_build, :artifacts, pipeline: pipeline)
end
let(:pipeline) { create(:ci_pipeline, :success) }
let_it_be(:pipeline) { create(:ci_pipeline, :success) }
it { expect(subject).to be_falsey }
context 'when the child pipeline has code quality reports' do
let_it_be(:child_pipeline) { create(:ci_pipeline, :with_codequality_report, child_of: pipeline) }
it { expect(subject).to be_truthy }
context 'with FF show_child_reports_in_mr_page disabled' do
before do
stub_feature_flags(show_child_reports_in_mr_page: false)
end
it { expect(subject).to be_falsey }
end
end
end
end
@ -4988,6 +5002,24 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
it 'returns codequality reports without degradations' do
expect(codequality_reports.degradations).to be_empty
end
context 'when child pipeline has codequality reports' do
let_it_be(:child_pipeline) { create(:ci_pipeline, :with_codequality_report, child_of: pipeline) }
it 'returns codequality report with collected data' do
expect(codequality_reports.degradations_count).to eq(3)
end
context 'with FF show_child_reports_in_mr_page disabled' do
before do
stub_feature_flags(show_child_reports_in_mr_page: false)
end
it 'returns codequality reports without degradations' do
expect(codequality_reports.degradations).to be_empty
end
end
end
end
end

View File

@ -28,7 +28,7 @@ RSpec.describe Milestone, 'Milestoneish', factory_default: :keep do
before do
project.add_developer(member)
project.add_guest(guest)
stub_feature_flags(work_items_alpha: false)
stub_feature_flags(work_item_epic_milestones: false)
end
describe '#sorted_issues' do

View File

@ -477,6 +477,35 @@ RSpec.describe 'InternalEventsCli::Flows::MetricDefiner', :aggregate_failures, f
end
context "when creating a multiple metrics" do
it 'displays the metric paths based on the time frames' do
queue_cli_inputs([
"2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time
"2\n", # Enum-select: Multiple events -- count occurrences of several separate events or interactions
'internal_events_cli', # Filters to the relevant events
' ', # Multi-select: internal_events_cli_closed
"\e[B", # Arrow down to: internal_events_cli_used
' ', # Multi-select: internal_events_cli_used
"\n", # Submit selections
"\n", # Select: Monthly/Weekly/Total count
"where a definition file was created with the CLI\n", # Input description
"1\n", # Select: Copy & continue
"\e[B \n" # Skip product categories
# "y\n" # Create file
])
expected_output = <<~TEXT.chomp
This would create 3 metrics with the following key paths:
monthly: counts.count_total_internal_events_cli_closed_and_internal_events_cli_used_monthly
weekly: counts.count_total_internal_events_cli_closed_and_internal_events_cli_used_weekly
total: counts.count_total_internal_events_cli_closed_and_internal_events_cli_used
TEXT
with_cli_thread do
expect { plain_last_lines }.to eventually_include_cli_text(expected_output)
end
end
it 'shows link to the metric dashboard' do
queue_cli_inputs([
"2\n", # Enum-select: New Metric -- calculate how often one or more existing events occur over time