Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-10-28 15:24:31 +00:00
parent 2fb61152d1
commit 28e5c51ff0
45 changed files with 974 additions and 142 deletions

View File

@ -3019,8 +3019,6 @@
changes:
- ".gitlab/ci/test-metadata.gitlab-ci.yml"
- "scripts/rspec_helpers.sh"
when: manual
allow_failure: true
- <<: *if-merge-request-not-approved
when: never
- <<: *if-merge-request
@ -3362,25 +3360,14 @@
- <<: *if-merge-request-labels-run-observability-e2e-tests-current-branch
when: never
- <<: *if-merge-request-labels-run-observability-e2e-tests-main-branch
allow_failure: true
- <<: *if-merge-request
changes: *observability-code-patterns
allow_failure: true
- <<: *if-merge-request
changes: *code-patterns
when: manual
allow_failure: true
.observability-backend-current-branch:rules:
rules:
- <<: *if-merge-request-labels-run-observability-e2e-tests-main-branch
when: never
- <<: *if-merge-request-labels-run-observability-e2e-tests-current-branch
allow_failure: true
- <<: *if-merge-request
changes: *code-patterns
when: manual
allow_failure: true
##########################
# Pre-merge checks rules #

View File

@ -1,7 +1,4 @@
.tests-metadata-state:
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
before_script:
- source scripts/utils.sh
artifacts:
expire_in: 31d
paths:
@ -13,21 +10,26 @@
retrieve-tests-metadata:
extends:
- .tests-metadata-state
- .fast-no-clone-job
- .test-metadata:rules:retrieve-tests-metadata
# We use a smaller image for this job only (update-tests-metadata compiles some gems)
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}-slim
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}-alpine3.20
stage: prepare
needs: []
variables:
FILES_TO_DOWNLOAD: >
scripts/setup/tests-metadata.rb
before_script:
- apk add --no-cache --update curl # Not present in ruby-alpine, so we add it manually
- !reference [".fast-no-clone-job", before_script]
script:
- apt-get update && apt-get install -y curl # Not present in ruby-slim, so we add it manually
- install_gitlab_gem
- source ./scripts/rspec_helpers.sh
- retrieve_tests_metadata
- ruby scripts/setup/tests-metadata.rb retrieve
update-tests-metadata:
extends:
- .tests-metadata-state
- .test-metadata:rules:update-tests-metadata
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
stage: post-test
needs:
- job: retrieve-tests-metadata
@ -42,9 +44,27 @@ update-tests-metadata:
optional: true
- job: rspec:artifact-collector ee remainder
optional: true
variables:
FLAKY_RSPEC_GENERATE_REPORT: "true"
script:
- run_timed_command "retry gem install fog-aws mime-types activesupport rspec_profiling postgres-copy --no-document"
- source ./scripts/rspec_helpers.sh
- source scripts/utils.sh
- source scripts/rspec_helpers.sh
- test -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo -e "\e[31m" 'Consider add ~"pipeline:run-all-rspec" to run full rspec jobs' "\e[0m"
- run_timed_command "retry gem install fog-aws mime-types activesupport rspec_profiling postgres-copy --no-document"
- update_tests_metadata
- update_tests_mapping
verify-tests-metadata:
extends:
- .default-retry
- .ruby-cache
- .test-metadata:rules:update-tests-metadata
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:${RUBY_VERSION}
stage: post-test
needs:
- update-tests-metadata
before_script:
- source scripts/utils.sh
- bundle_install_script
script:
- bundle exec scripts/setup/tests-metadata.rb verify

View File

@ -5,7 +5,7 @@ variables:
EXIFTOOL_VERSION: "12.60"
GCLOUD_VERSION: "413"
GIT_VERSION: "2.45"
GO_VERSION: "1.22"
GO_VERSION: "1.23"
GRAPHICSMAGICK_VERSION: "1.3.36"
HELM_VERSION: "3.16"
KIND_VERSION: "0.24"

View File

@ -173,6 +173,7 @@ export default {
workItemType?.widgetDefinitions,
workItemType.name,
workItemType.id,
workItemType.iconName,
);
this.selectedWorkItemTypeId = workItemType?.id;
} else {
@ -182,6 +183,7 @@ export default {
workItemType?.widgetDefinitions,
workItemType.name,
workItemType.id,
workItemType.iconName,
);
});
this.showWorkItemTypeSelect = true;
@ -234,6 +236,9 @@ export default {
selectedWorkItemTypeName() {
return this.selectedWorkItemType?.name;
},
selectedWorkItemTypeIconName() {
return this.selectedWorkItemType?.iconName;
},
formOptions() {
return [{ value: null, text: s__('WorkItem|Select type') }, ...this.workItemTypesForSelect];
},
@ -486,6 +491,7 @@ export default {
selectedWorkItemWidgets,
this.selectedWorkItemTypeName,
this.selectedWorkItemTypeId,
this.selectedWorkItemTypeIconName,
);
},
},

View File

@ -108,6 +108,7 @@ export default {
this.workItemTypes[0]?.widgetDefinitions,
this.workItemTypeName,
this.workItemTypes[0]?.id,
this.workItemTypes[0]?.iconName,
);
},
error() {
@ -169,6 +170,7 @@ export default {
this.workItemTypes[0]?.widgetDefinitions,
this.workItemTypeName,
this.workItemTypes[0]?.id,
this.workItemTypes[0]?.iconName,
);
}
this.hideModal();

View File

@ -62,6 +62,9 @@ export default {
isWorkItemConfidential() {
return this.workItem?.confidential;
},
isLoading() {
return this.$apollo.queries.workItem.loading;
},
},
apollo: {
// eslint-disable-next-line @gitlab/vue-no-undef-apollo-properties
@ -86,7 +89,10 @@ export default {
</script>
<template>
<div class="gl-mb-3 gl-mt-3 gl-text-gray-700">
<div v-if="isLoading">
<gl-loading-icon inline />
</div>
<div v-else class="gl-mb-3 gl-mt-3 gl-text-gray-700">
<work-item-state-badge v-if="workItemState" :work-item-state="workItemState" />
<gl-loading-icon v-if="updateInProgress" inline />
<confidentiality-badge

View File

@ -297,6 +297,7 @@ export const setNewWorkItemCache = async (
widgetDefinitions,
workItemType,
workItemTypeId,
workItemTypeIconName,
// eslint-disable-next-line max-params
) => {
const workItemAttributesWrapperOrder = [
@ -586,7 +587,7 @@ export const setNewWorkItemCache = async (
workItemType: {
id: workItemTypeId || 'mock-work-item-type-id',
name: workItemTitleCase,
iconName: 'issue-type-epic',
iconName: workItemTypeIconName,
__typename: 'WorkItemType',
},
userPermissions: {

View File

@ -1,6 +1,7 @@
fragment WorkItemTypeFragment on WorkItemType {
id
name
iconName
widgetDefinitions {
type
... on WorkItemWidgetDefinitionHierarchy {

View File

@ -8,7 +8,8 @@ module Members
'required' => %w[source_id source_type],
'properties' => {
'source_id' => { 'type' => 'integer' },
'source_type' => { 'type' => 'string' }
'source_type' => { 'type' => 'string' },
'invited_user_ids' => { 'type' => 'array' }
}
}
end

View File

@ -82,6 +82,10 @@ class Member < ApplicationRecord
Member.default_scoped.from_union([group_members, project_members]).merge(self)
end
scope :including_user_ids, ->(user_ids) do
where(user_id: user_ids)
end
scope :excluding_users, ->(user_ids) do
where.not(user_id: user_ids)
end

View File

@ -39,6 +39,7 @@ module Ci
Gitlab::Ci::Pipeline::Chain::Metrics,
Gitlab::Ci::Pipeline::Chain::TemplateUsage,
Gitlab::Ci::Pipeline::Chain::ComponentUsage,
Gitlab::Ci::Pipeline::Chain::KeywordUsage,
Gitlab::Ci::Pipeline::Chain::Pipeline::Process].freeze
# Create a new pipeline in the specified project.

View File

@ -8,10 +8,7 @@ module Search
{
state: params[:state],
confidential: params[:confidential],
include_archived: params[:include_archived],
num_context_lines: params[:num_context_lines]&.to_i,
hybrid_similarity: params[:hybrid_similarity]&.to_f,
hybrid_boost: params[:hybrid_boost]&.to_f
include_archived: params[:include_archived]
}
end
end

View File

@ -0,0 +1,18 @@
---
description: Usage of run keyword in CICD Yaml
internal_events: true
action: use_run_keyword_in_cicd_yaml
identifiers:
- project
- namespace
- user
product_group: pipeline_authoring
milestone: '17.6'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169408
distributions:
- ce
- ee
tiers:
- free
- premium
- ultimate

View File

@ -0,0 +1,9 @@
---
name: run_keyword_instrumentation
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/467245
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169408
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/500973
milestone: '17.6'
group: group::pipeline authoring
type: gitlab_com_derisk
default_enabled: false

View File

@ -103,6 +103,8 @@ RspecProfiling.configure do |config|
RspecProfiling::VCS::Git.prepend(RspecProfilingExt::Git)
RspecProfiling::Run.prepend(RspecProfilingExt::Run)
RspecProfiling::Example.prepend(RspecProfilingExt::Example)
FileUtils.mkdir_p(File.dirname(ENV['RSPEC_PROFILING_FOLDER_PATH']))
config.collector = RspecProfilingExt::Collectors::CSVWithTimestamps
config.csv_path = -> do
prefix = "#{ENV['CI_JOB_NAME']}-".gsub(%r{[ /]}, '-') if ENV['CI_JOB_NAME']

View File

@ -0,0 +1,21 @@
---
key_path: counts.count_total_use_run_keyword_in_cicd_yaml_monthly
description: Monthly count of run keyword usages in GitLab CI/CD YAML
product_group: pipeline_authoring
performance_indicator_type: []
value_type: number
status: active
milestone: '17.6'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169408
time_frame: 28d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: use_run_keyword_in_cicd_yaml

View File

@ -0,0 +1,21 @@
---
key_path: counts.count_total_use_run_keyword_in_cicd_yaml_weekly
description: Weekly count of run keyword usages in GitLab CI/CD YAML
product_group: pipeline_authoring
performance_indicator_type: []
value_type: number
status: active
milestone: '17.6'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169408
time_frame: 7d
data_source: internal_events
data_category: optional
distribution:
- ce
- ee
tier:
- free
- premium
- ultimate
events:
- name: use_run_keyword_in_cicd_yaml

View File

@ -375,6 +375,8 @@
- 1
- - gitlab_subscriptions_member_management_apply_pending_member_approvals
- 1
- - gitlab_subscriptions_members_added
- 1
- - gitlab_subscriptions_members_destroyed
- 1
- - gitlab_subscriptions_members_record_last_activity

View File

@ -43,8 +43,8 @@ OPTIONAL_REVIEW_TEMPLATE = '%{role} review is optional for %{category}'
NOT_AVAILABLE_TEMPLATES = {
default: 'No %{role} available',
analytics_instrumentation: group_not_available_template('#g_analyze_analytics_instrumentation', '@gitlab-org/analytics-section/analytics-instrumentation/engineers'),
import_integrate_be: group_not_available_template('#g_manage_import_and_integrate', '@gitlab-org/manage/import-and-integrate'),
import_integrate_fe: group_not_available_template('#g_manage_import_and_integrate', '@gitlab-org/manage/import-and-integrate'),
import_integrate_be: group_not_available_template('#g_import_and_integrate', '@gitlab-org/foundations/import-and-integrate'),
import_integrate_fe: group_not_available_template('#g_import_and_integrate', '@gitlab-org/foundations/import-and-integrate'),
remote_development_be: group_not_available_template('#f_remote_development', '@gitlab-org/maintainers/remote-development/backend')
}.freeze

View File

@ -11,8 +11,8 @@
issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/498268
# Use the impact calculator https://gitlab-com.gitlab.io/gl-infra/breaking-change-impact-calculator/?
impact: medium
scope: [instance, group, project]
resolution_role: [Admin, Owner, Maintainer]
scope: instance, group, project
resolution_role: Admin, Owner, Maintainer
manual_task: true
body: | # (required) Don't change this line.
The [GitLab Runner Docker Machine executor](https://docs.gitlab.com/runner/executors/docker_machine.html) is deprecated and will be fully removed from the product as a supported feature in GitLab 20.0 (May 2027). The replacement for Docker Machine, [GitLab Runner Autoscaler](https://docs.gitlab.com/runner/runner_autoscale/) with GitLab developed plugins for Amazon Web Services (AWS) EC2, Google Compute Engine (GCE) and Microsoft Azure virtual machines (VMs) is generally available. With this announcement, the GitLab Runner team will no longer accept community contributions for the GitLab maintained Docker Machine fork, or resolve newly identified bugs.

View File

@ -16,7 +16,7 @@ DETAILS:
> - [Enabled on self-managed](https://gitlab.com/groups/gitlab-org/-/epics/15176) in GitLab 17.6.
> - GitLab Duo add-on required in GitLab 17.6 and later.
There are multiple platforms available to host your self-hosted Large Language Models (LLMs). Each platform has unique features and benefits that can cater to different needs. The following documentation summarises some of the most popular options:
There are multiple platforms available to host your self-hosted Large Language Models (LLMs). Each platform has unique features and benefits that can cater to different needs. The following documentation summarises the currently supported options:
## For non-cloud on-premise model deployments

View File

@ -56,9 +56,9 @@ The following models are under evaluation, and support is limited:
## Hardware Requirements
For optimal performance, the following hardware specifications are recommended for hosting these models:
For optimal performance, the following hardware specifications are recommended as baselines for hosting these models. Hosting requirements may vary from model to model, so we recommend checking the model vendor's documentation as well:
- **CPU**: Minimum 8 cores (16 threads recommended).
- **RAM**: At least 32 GB (64 GB or more recommended for larger models).
- **GPU**: NVIDIA A100 or equivalent for optimal inference performance.
- **GPU**: 2x NVIDIA A100 or equivalent for optimal inference performance.
- **Storage**: SSD with sufficient space for model weights and data.

View File

@ -9,7 +9,7 @@ info: Any user with at least the Maintainer role can merge updates to this conte
## Prerequisites
To test imports, you need a Bitbucket Server instance running locally. For information on running a local instance, see
[these instructions](https://gitlab.com/gitlab-org/manage/import-and-integrate/team/-/blob/main/integrations/bitbucket_server.md).
[these instructions](https://gitlab.com/gitlab-org/foundations/import-and-integrate/team/-/blob/main/integrations/bitbucket_server.md).
## Code structure

View File

@ -44,7 +44,7 @@ that are coded across multiple repositories.
| [All feature flags in GitLab](../../../user/feature_flags.md) | [Generated during docs build](https://gitlab.com/gitlab-org/gitlab-docs/-/blob/main/doc/raketasks.md#generate-the-feature-flag-tables) | [Technical Writing](https://handbook.gitlab.com/handbook/product/ux/technical-writing/) |
| [GitLab Runner feature flags](https://docs.gitlab.com/runner/configuration/feature-flags.html) | [Page source](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/ec6e1797d2173a95c8ac7f726bd62f6f110b7211/docs/configuration/feature-flags.md?plain=1#L39) | [Runner](https://handbook.gitlab.com/handbook/engineering/development/ops/verify/runner/) |
| [Deprecations and removals by version](../../../update/deprecations.md) | [Deprecating GitLab features](../../deprecation_guidelines/index.md) | |
| [GraphQL API resources](../../../api/graphql/reference/index.md) | [GraphQL API style guide](../../api_graphql_styleguide.md#documentation-and-schema) | [Import and Integrate](https://handbook.gitlab.com/handbook/engineering/development/dev/manage/import-and-integrate/) |
| [GraphQL API resources](../../../api/graphql/reference/index.md) | [GraphQL API style guide](../../api_graphql_styleguide.md#documentation-and-schema) | [Import and Integrate](https://handbook.gitlab.com/handbook/engineering/development/dev/foundations/import-and-integrate/) |
| [Audit event types](../../../user/compliance/audit_event_types.md) | [Audit event development guidelines](../../audit_event_guide/index.md) | [Compliance](https://handbook.gitlab.com/handbook/engineering/development/sec/govern/compliance/) |
| [Available custom role permissions](../../../user/custom_roles/abilities.md) | [Generated by Rake task](https://gitlab.com/gitlab-org/gitlab/-/blob/master/tooling/custom_roles/docs/templates/custom_abilities.md.erb) | [Authorization](https://handbook.gitlab.com/handbook/product/categories/#authorization-group)|
| DAST vulnerability check documentation ([Example](../../../user/application_security/dast/browser/checks/798.19.md)) | [How to generate the Markdown](https://gitlab.com/gitlab-org/security-products/dast-cwe-checks/-/blob/main/doc/how-to-generate-the-markdown-documentation.md) | [Dynamic Analysis](https://handbook.gitlab.com/handbook/product/categories/#dynamic-analysis-group) |

View File

@ -137,8 +137,8 @@ See [database guidelines](database/index.md).
The following integration guides are internal. Some integrations require access to administrative accounts of third-party services and are available only for GitLab team members to contribute to:
- [Jira integration development](https://gitlab.com/gitlab-org/manage/import-and-integrate/team/-/blob/main/integrations/jira.md)
- [GitLab for Slack app development](https://gitlab.com/gitlab-org/manage/import-and-integrate/team/-/blob/main/integrations/slack.md)
- [Jira integration development](https://gitlab.com/gitlab-org/foundations/import-and-integrate/team/-/blob/main/integrations/jira.md)
- [GitLab for Slack app development](https://gitlab.com/gitlab-org/foundations/import-and-integrate/team/-/blob/main/integrations/slack.md)
## Testing guides

View File

@ -21,7 +21,7 @@ GitLab is being translated into many languages. To select a language to contribu
- If the language you want is available, proceed to the next step.
- If the language you want is not available,
[open an issue](https://gitlab.com/gitlab-org/gitlab/-/issues?scope=all&utf8=✓&state=all&label_name[]=Category%3AInternationalization).
Notify our Crowdin administrators by including `@gitlab-org/manage/import-and-integrate` in your issue.
Notify our Crowdin administrators by including `@gitlab-org/foundations/import-and-integrate` in your issue.
- After the issue and any merge requests are complete, restart this procedure.
1. View the list of files and folders. Select `gitlab.pot` to open the translation editor.

View File

@ -12,7 +12,7 @@ which are part of our [main Rails project](https://gitlab.com/gitlab-org/gitlab)
Also see our [direction page](https://about.gitlab.com/direction/manage/import_and_integrate/integrations/) for an overview of our strategy around integrations.
This guide is a work in progress. You're welcome to ping `@gitlab-org/manage/import-and-integrate`
This guide is a work in progress. You're welcome to ping `@gitlab-org/foundations/import-and-integrate`
if you need clarification or spot any outdated information.
## Add a new integration

View File

@ -125,9 +125,9 @@ If you use Gitpod and you get an error about Jira not being able to access the d
## Setup without Jira
If you do not require Jira to test with, you can use the [Jira connect test tool](https://gitlab.com/gitlab-org/manage/import-and-integrate/jira-connect-test-tool) and your local GDK.
If you do not require Jira to test with, you can use the [Jira connect test tool](https://gitlab.com/gitlab-org/foundations/import-and-integrate/jira-connect-test-tool) and your local GDK.
1. Clone the [**Jira-connect-test-tool**](https://gitlab.com/gitlab-org/manage/import-and-integrate/jira-connect-test-tool) `git clone git@gitlab.com:gitlab-org/manage/integrations/jira-connect-test-tool.git`.
1. Clone the [**Jira-connect-test-tool**](https://gitlab.com/gitlab-org/foundations/import-and-integrate/jira-connect-test-tool) `git clone git@gitlab.com:gitlab-org/manage/integrations/jira-connect-test-tool.git`.
1. Start the app `bundle exec rackup`. (The app requires your GDK GitLab to be available on `http://127.0.0.1:3000`.).
1. Open `config/gitlab.yml` and uncomment the `jira_connect` config.
1. If running GDK on a domain other than `localhost`, you must add the domain to `additional_iframe_ancestors`. For example:

View File

@ -61,6 +61,7 @@ How to enable:
1. Restart your instance (e.g. `gdk restart`).
1. Follow the [instructions to activate your new license](../../administration/license.md#activate-gitlab-ee).
1. Test out the GitLab Observability feature by navigating to a project and selecting Tracing, Metrics, or Logs from the Monitor section of the navigation menu.
1. If you are seeing 404 errors you might need to manually [refresh](../../subscriptions/self_managed/index.md#manually-synchronize-subscription-data) your license data.
### Use the purpose built `devvm`

View File

@ -324,7 +324,7 @@ There are two Observability end-to-end jobs:
- `e2e:observability-backend-main-branch`: executes the tests against the main branch of the GitLab Observability Backend.
- `e2e:observability-backend`: executes the tests against a branch of the GitLab Observability Backend with the same name as the MR branch.
The Observability E2E jobs are triggered automatically for merge requests that touch relevant files, such as those in the `lib/gitlab/observability/` directory or specific configuration files related to observability features.
The Observability E2E jobs are triggered automatically **only** for merge requests that touch relevant files, such as those in the `lib/gitlab/observability/` directory or specific configuration files related to observability features.
To run these jobs manually, you can add the `pipeline:run-observability-e2e-tests-main-branch` or `pipeline:run-observability-e2e-tests-current-branch` label to your merge request.
@ -339,7 +339,7 @@ In the following example workflow, a developer creates an MR that touches Observ
1. The developer should add the `pipeline:run-observability-e2e-tests-current-branch` label on the GitLab MR and wait for the `e2e:observability-backend` job to succeed.
1. If `e2e:observability-backend` succeeds, the developer can merge both MRs.
+In addition, the developer can manually add `pipeline:run-observability-e2e-tests-main-branch` to force the MR to run the `e2e:observability-backend-main-branch` job. This could be useful in case of changes to files that are not being tracked as related to observability.
In addition, the developer can manually add `pipeline:run-observability-e2e-tests-main-branch` to force the MR to run the `e2e:observability-backend-main-branch` job. This could be useful in case of changes to files that are not being tracked as related to observability.
### Review app jobs

View File

@ -0,0 +1,39 @@
# frozen_string_literal: true
module Gitlab
  module Ci
    module Pipeline
      module Chain
        # Pipeline-creation chain step that emits an internal tracking event
        # when selected CI/CD YAML keywords are used (currently only `run`).
        # Observability only: it never mutates the pipeline.
        class KeywordUsage < Chain::Base
          include Gitlab::InternalEventsTracking

          def perform!
            # Gated by an instance-scoped feature flag so tracking can be
            # rolled out / disabled without a deploy.
            return unless Feature.enabled?(:run_keyword_instrumentation, :instance)

            record_keyword_event('run', command.yaml_processor_result.uses_keyword?(:run))
          end

          # This step never halts the chain.
          def break?
            false
          end

          private

          # Emits `use_<keyword>_keyword_in_cicd_yaml` when `used` is truthy.
          def record_keyword_event(keyword, used)
            return unless used

            track_internal_event(
              "use_#{keyword}_keyword_in_cicd_yaml",
              project: @pipeline.project,
              user: @pipeline.user
            )
          end
        end
      end
    end
  end
end

View File

@ -43,6 +43,12 @@ module Gitlab
@included_templates ||= @ci_config.included_templates
end
# Returns true when any job in the parsed config defines the given keyword
# with a present (non-blank) value.
def uses_keyword?(keyword)
  jobs.each_value.any? { |job| job[keyword].present? }
end
def included_components
@ci_config.included_components
end

View File

@ -4,7 +4,11 @@ module Gitlab
module SidekiqMiddleware
module ConcurrencyLimit
class QueueManager
attr_reader :redis_key
include ExclusiveLeaseGuard
LEASE_TIMEOUT = 10.seconds
attr_reader :redis_key, :worker_name
def initialize(worker_name:, prefix:)
@worker_name = worker_name
@ -28,19 +32,29 @@ module Gitlab
end
def resume_processing!(limit:)
with_redis do |redis|
jobs = next_batch_from_queue(redis, limit: limit)
break if jobs.empty?
try_obtain_lease do
with_redis do |redis|
jobs = next_batch_from_queue(redis, limit: limit)
break if jobs.empty?
jobs.each { |job| send_to_processing_queue(deserialize(job)) }
remove_processed_jobs(redis, limit: jobs.length)
jobs.each { |job| send_to_processing_queue(deserialize(job)) }
remove_processed_jobs(redis, limit: jobs.length)
jobs.length
jobs.length
end
end
end
private
def lease_timeout
LEASE_TIMEOUT
end
def lease_key
@lease_key ||= "concurrency_limit:queue_manager:{#{worker_name.underscore}}"
end
def with_redis(&)
Gitlab::Redis::SharedState.with(&) # rubocop:disable CodeReuse/ActiveRecord -- Not active record
end
@ -58,8 +72,8 @@ module Gitlab
Gitlab::ApplicationContext.with_raw_context(context) do
args = job['args']
Gitlab::SidekiqLogging::ConcurrencyLimitLogger.instance.resumed_log(@worker_name, args)
worker_klass = @worker_name.safe_constantize
Gitlab::SidekiqLogging::ConcurrencyLimitLogger.instance.resumed_log(worker_name, args)
worker_klass = worker_name.safe_constantize
next if worker_klass.nil?
worker_klass.concurrency_limit_resume(job['buffered_at']).perform_async(*args)

View File

@ -1,51 +1,7 @@
#!/usr/bin/env bash
function retrieve_tests_metadata() {
mkdir -p $(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}") $(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}") "${RSPEC_PROFILING_FOLDER_PATH}"
curl --fail --location -o "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ||
echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH:-unknown_file}"
curl --fail --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" ||
echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
curl --fail --location -o "${RSPEC_FAST_QUARANTINE_PATH}" "https://gitlab-org.gitlab.io/quality/engineering-productivity/fast-quarantine/${RSPEC_FAST_QUARANTINE_PATH}" ||
echo "" > "${RSPEC_FAST_QUARANTINE_PATH}"
}
function update_tests_metadata() {
local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH:-unknown_folder}")/"
local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH:-unknown_folder}")/"
if [[ ! -f "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ]]; then
curl --fail --location -o "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ||
echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
fi
if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
curl --fail --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" ||
echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
fi
if [[ ! -f "${RSPEC_FAST_QUARANTINE_PATH}" ]]; then
curl --fail --location -o "${RSPEC_FAST_QUARANTINE_PATH}" "https://gitlab-org.gitlab.io/quality/engineering-productivity/fast-quarantine/${RSPEC_FAST_QUARANTINE_PATH}" ||
echo "" > "${RSPEC_FAST_QUARANTINE_PATH}"
fi
if [[ "$AVERAGE_KNAPSACK_REPORT" == "true" ]]; then
# a comma separated list of file names matching the glob
local new_reports="$(printf '%s,' ${knapsack_folder_path:-unknown_folder}rspec*.json)"
scripts/pipeline/average_reports.rb -i "${KNAPSACK_RSPEC_SUITE_REPORT_PATH:-unknown_file}" -n "${new_reports}"
else
scripts/merge-reports "${KNAPSACK_RSPEC_SUITE_REPORT_PATH:-unknown_file}" ${knapsack_folder_path:-unknown_folder}rspec*.json
fi
export FLAKY_RSPEC_GENERATE_REPORT="true"
scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH:-unknown_file}" ${rspec_flaky_folder_path:-unknown_folder}all_*.json
# Prune flaky tests that weren't flaky in the last 7 days, *after* updating the flaky tests detected
# in this pipeline, so that first_flaky_at for tests that are still flaky is maintained.
scripts/flaky_examples/prune-old-flaky-examples "${FLAKY_RSPEC_SUITE_REPORT_PATH:-unknown_file}"
scripts/setup/tests-metadata.rb update
if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
if [[ -n "$RSPEC_PROFILING_PGSSLKEY" ]]; then
@ -626,7 +582,7 @@ function cleanup_individual_job_reports() {
rspec/retried_tests_*_report.txt \
${RSPEC_LAST_RUN_RESULTS_FILE:-unknown_folder} \
${RSPEC_PROFILING_FOLDER_PATH:-unknown_folder}/**/*
rmdir ${RSPEC_PROFILING_FOLDER_PAT:-unknown_folder} || true
rmdir ${RSPEC_PROFILING_FOLDER_PATH:-unknown_folder} || true
}
function generate_flaky_tests_reports() {

190
scripts/setup/tests-metadata.rb Executable file
View File

@ -0,0 +1,190 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'fileutils'
require 'json'
class TestsMetadata < Struct.new( # rubocop:disable Style/StructInheritance -- Otherwise we cannot define a nested constant
:mode,
:knapsack_report_path, :flaky_report_path, :fast_quarantine_path,
:average_knapsack,
keyword_init: true)
FALLBACK_JSON = '{}'
def main
abort("Unknown mode: `#{mode}`. It must be `retrieve` or `update`.") unless
mode == 'retrieve' || mode == 'update' || mode == 'verify'
if mode == 'verify'
verify
else
prepare_directories
retrieve
update if mode == 'update'
end
end
private
def verify
verify_knapsack_report
verify_flaky_report
verify_fast_quarantine
puts 'OK'
end
def verify_knapsack_report
report = JSON.parse(File.read(knapsack_report_path))
valid = report.is_a?(Hash) &&
report.all? do |spec, duration|
spec.is_a?(String) && duration.is_a?(Numeric)
end
valid || abort("#{knapsack_report_path} is not a valid Knapsack report")
rescue JSON::ParserError
abort("#{knapsack_report_path} is not valid JSON")
end
def verify_flaky_report
require_relative '../../gems/gitlab-rspec_flaky/lib/gitlab/rspec_flaky/report'
Gitlab::RspecFlaky::Report.load(flaky_report_path).flaky_examples.to_h
rescue JSON::ParserError
abort("#{flaky_report_path} is not valid JSON")
end
def verify_fast_quarantine
require_relative '../../tooling/lib/tooling/fast_quarantine'
fast_quarantine =
Tooling::FastQuarantine.new(fast_quarantine_path: fast_quarantine_path)
fast_quarantine.identifiers
end
def prepare_directories
FileUtils.mkdir_p([
File.dirname(knapsack_report_path),
File.dirname(flaky_report_path),
File.dirname(fast_quarantine_path)
])
end
def retrieve
tasks = []
tasks << async_curl_download_json(
url: "https://gitlab-org.gitlab.io/gitlab/#{knapsack_report_path}",
path: knapsack_report_path,
fallback_content: FALLBACK_JSON
)
tasks << async_curl_download_json(
url: "https://gitlab-org.gitlab.io/gitlab/#{flaky_report_path}",
path: flaky_report_path,
fallback_content: FALLBACK_JSON
)
tasks << async_curl_download(
url: "https://gitlab-org.gitlab.io/quality/engineering-productivity/fast-quarantine/#{fast_quarantine_path}",
path: fast_quarantine_path,
fallback_content: ''
)
tasks.compact.each(&:join)
end
def update
update_knapsack_report
update_flaky_report
# Prune flaky tests that weren't flaky in the last 7 days, *after* updating the flaky tests detected
# in this pipeline, so that first_flaky_at for tests that are still flaky is maintained.
prune_flaky_report
end
def update_knapsack_report
new_reports = Dir["#{File.dirname(knapsack_report_path)}/rspec*.json"]
if average_knapsack
system_abort_if_failed(%W[
scripts/pipeline/average_reports.rb
-i #{knapsack_report_path}
-n #{new_reports.join(',')}
])
else
system_abort_if_failed(%W[
scripts/merge-reports
#{knapsack_report_path}
#{new_reports.join(' ')}
])
end
end
def update_flaky_report
new_reports = Dir["#{File.dirname(flaky_report_path)}/all_*.json"]
system_abort_if_failed(%W[
scripts/merge-reports
#{flaky_report_path}
#{new_reports.join(' ')}
])
end
def prune_flaky_report
system_abort_if_failed(%W[
scripts/flaky_examples/prune-old-flaky-examples
#{flaky_report_path}
])
end
def async_curl_download_json(**args)
async_curl_download(**args) do |content|
JSON.parse(content)
rescue JSON::ParserError
false
end
end
def async_curl_download(url:, path:, fallback_content:)
if force_download? || !File.exist?(path) # rubocop:disable Style/GuardClause -- This is easier to read
async do
success = system(*%W[curl --fail --location -o #{path} #{url}])
if success
if block_given? # rubocop:disable Style/IfUnlessModifier -- This is easier to read
yield(File.read(path)) || File.write(path, fallback_content)
end
else
File.write(path, fallback_content)
end
end
end
end
def force_download?
mode == 'retrieve'
end
def system_abort_if_failed(command)
system(*command) || abort("Command failed for: #{command.join(' ')}")
end
# Runs the given task on a background thread; callers join on the returned
# Thread (see #retrieve).
def async(&task)
  Thread.new { task.call }
end
end
# Script entry point: runs only when invoked directly (not when required by
# the spec suite). The mode comes from the first CLI argument
# (retrieve/update/verify); report paths and options come from the
# environment, with CI defaults.
if $PROGRAM_NAME == __FILE__
  TestsMetadata.new(
    mode: ARGV.first,
    knapsack_report_path: ENV.fetch('KNAPSACK_RSPEC_SUITE_REPORT_PATH', 'knapsack/report-master.json'),
    flaky_report_path: ENV.fetch('FLAKY_RSPEC_SUITE_REPORT_PATH', 'rspec/flaky/report-suite.json'),
    fast_quarantine_path: ENV.fetch('RSPEC_FAST_QUARANTINE_PATH', 'rspec/fast_quarantine-gitlab.txt'),
    average_knapsack: ENV['AVERAGE_KNAPSACK_REPORT'] == 'true'
  ).main
end

View File

@ -206,10 +206,6 @@ function install_tff_gem() {
run_timed_command "gem install test_file_finder --no-document --version 0.3.1"
}
# Installs the pinned activesupport gem (timed via run_timed_command).
function install_activesupport_gem() {
  run_timed_command "gem install activesupport --no-document --version 6.1.7.2"
}
# Installs the pinned junit_merge gem (timed via run_timed_command).
function install_junit_merge_gem() {
  run_timed_command "gem install junit_merge --no-document --version 0.1.2"
}

View File

@ -213,6 +213,7 @@ describe('Create work item component', () => {
expectedWorkItemTypeData.widgetDefinitions,
expectedWorkItemTypeData.name,
expectedWorkItemTypeData.id,
expectedWorkItemTypeData.iconName,
);
},
);

View File

@ -58,10 +58,17 @@ describe('WorkItemCreatedUpdated component', () => {
await waitForPromises();
};
it('calls the work item query', async () => {
it('calls the successHandler when the query is completed', async () => {
await createComponent();
expect(successHandler).toHaveBeenCalled();
expect(findLoadingIcon().exists()).toBe(false);
});
it('shows loading icon when the query is still loading', () => {
createComponent();
expect(findLoadingIcon().exists()).toBe(true);
});
it('skips calling the work item query when workItemIid is not defined', async () => {

View File

@ -1,6 +1,7 @@
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import namespaceWorkItemTypesQueryResponse from 'test_fixtures/graphql/work_items/namespace_work_item_types.query.graphql.json';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@ -32,7 +33,6 @@ import {
workItemHierarchyTreeEmptyResponse,
workItemHierarchyNoUpdatePermissionResponse,
mockRolledUpCountsByType,
namespaceWorkItemTypesQueryResponse,
} from '../../mock_data';
jest.mock('~/alert');
@ -381,7 +381,11 @@ describe('WorkItemTree', () => {
await nextTick();
expect(findWorkItemLinkChildrenWrapper().props('allowedChildrenByType')).toEqual({
Epic: ['Epic', 'Issue'],
Incident: ['Task'],
Issue: ['Task'],
Objective: ['Key Result', 'Objective'],
Ticket: ['Task'],
});
});
});

View File

@ -5456,39 +5456,6 @@ export const mockUploadErrorDesignMutationResponse = {
},
};
// Mock GraphQL response for the namespace work item types query: a single
// 'Issue' work item type whose HIERARCHY widget allows 'Task' children.
export const namespaceWorkItemTypesQueryResponse = {
  data: {
    workspace: {
      id: 'gid://gitlab/Namespaces/1',
      workItemTypes: {
        nodes: [
          {
            __typename: 'WorkItemType',
            id: 'gid://gitlab/WorkItems::Type/1',
            name: 'Issue',
            widgetDefinitions: [
              {
                type: 'HIERARCHY',
                allowedChildTypes: {
                  nodes: [
                    {
                      id: 'gid://gitlab/WorkItems::Type/5',
                      name: 'Task',
                      __typename: 'WorkItemType',
                    },
                  ],
                  __typename: 'WorkItemTypeConnection',
                },
                __typename: 'WorkItemWidgetDefinitionHierarchy',
              },
            ],
          },
        ],
      },
    },
  },
};
export const workItemHierarchyNoChildrenTreeResponse = {
data: {
workItem: {

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true
require 'spec_helper'
# Verifies that the KeywordUsage pipeline chain step instruments usage of the
# `run:` keyword via an internal event, and honors its feature flag.
RSpec.describe Gitlab::Ci::Pipeline::Chain::KeywordUsage, feature_category: :pipeline_composition do
  let_it_be(:project) { create(:project) }
  let_it_be(:user) { create(:user) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
  let(:command) { Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user) }
  let(:step) { described_class.new(pipeline, command) }
  describe '#perform!' do
    subject(:perform) { step.perform! }
    context 'when the :run keyword is used in the pipeline config' do
      before do
        # Keyword detection is delegated to the YAML processor result.
        allow(command).to receive(:yaml_processor_result)
          .and_return(instance_double(Gitlab::Ci::YamlProcessor::Result, uses_keyword?: true))
      end
      it 'tracks the usage of the :run keyword' do
        expect(step).to receive(:track_internal_event)
          .with('use_run_keyword_in_cicd_yaml', project: project, user: user)
        perform
      end
    end
    context 'when the :run keyword is not used in the pipeline config' do
      before do
        allow(command).to receive(:yaml_processor_result)
          .and_return(instance_double(Gitlab::Ci::YamlProcessor::Result, uses_keyword?: false))
      end
      it 'does not track the usage of the :run keyword' do
        expect(step).not_to receive(:track_internal_event)
        perform
      end
    end
    context 'when feature flag is disabled' do
      before do
        # Flag off suppresses tracking even when the keyword is present.
        stub_feature_flags(run_keyword_instrumentation: false)
        allow(command).to receive(:yaml_processor_result)
          .and_return(instance_double(Gitlab::Ci::YamlProcessor::Result, uses_keyword?: true))
      end
      it 'does not track the usage of the :run keyword' do
        expect(step).not_to receive(:track_internal_event)
        perform
      end
    end
  end
  describe '#break?' do
    subject { step.break? }
    # This step never halts the pipeline chain.
    it { is_expected.to be_falsy }
  end
end

View File

@ -45,6 +45,40 @@ module Gitlab
end
end
# Covers detection of a job-level keyword anywhere in the processed config.
# (Heredoc nesting reconstructed — the diff rendering stripped indentation.)
describe '#uses_keyword?' do
  subject { result.uses_keyword?(keyword) }
  let(:keyword) { :run }
  context 'when the :run keyword is present in a job' do
    let(:config_content) do
      <<~YAML
        job1:
          script: echo 'hello'
        job2:
          run:
            - name: 'Test run'
              script: echo run step
      YAML
    end
    it { is_expected.to be_truthy }
  end
  context 'when the :run keyword is not present in any job' do
    let(:config_content) do
      <<~YAML
        job1:
          script: echo 'hello'
        job2:
          script: echo 'world'
      YAML
    end
    it { is_expected.to be_falsy }
  end
end
describe '#config_metadata' do
subject(:config_metadata) { result.config_metadata }

View File

@ -76,6 +76,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::ConcurrencyLimit::QueueManager,
let(:setter) { instance_double('Sidekiq::Job::Setter') }
let(:buffered_at) { Time.now.utc }
before do
service.remove_instance_variable(:@lease) if service.instance_variable_defined?(:@lease)
end
it 'puts jobs back into the queue and respects order' do
travel_to(buffered_at) do
jobs.each do |j|
@ -83,6 +87,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::ConcurrencyLimit::QueueManager,
end
end
expect_next_instance_of(Gitlab::ExclusiveLease) do |el|
expect(el).to receive(:try_obtain).and_call_original
end
expect(worker_class).to receive(:concurrency_limit_resume)
.with(a_value_within(1).of(buffered_at.to_f)).twice.and_return(setter)
@ -107,6 +115,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::ConcurrencyLimit::QueueManager,
end
end
expect_next_instance_of(Gitlab::ExclusiveLease) do |el|
expect(el).to receive(:try_obtain).and_call_original
end
expect(Gitlab::ApplicationContext).to receive(:with_raw_context)
.with(stored_context)
.exactly(jobs.count).times.and_call_original
@ -117,5 +129,23 @@ RSpec.describe Gitlab::SidekiqMiddleware::ConcurrencyLimit::QueueManager,
expect { service.resume_processing!(limit: jobs.count) }
.to change { service.has_jobs_in_queue? }.from(true).to(false)
end
context 'when exclusive lease is already being held' do
before do
service.exclusive_lease.try_obtain
end
it 'does not perform enqueue' do
travel_to(buffered_at) do
jobs.each do |j|
service.add_to_queue!(j, worker_context)
end
end
expect(worker_class).not_to receive(:concurrency_limit_resume)
service.resume_processing!(limit: 2)
end
end
end
end

View File

@ -851,6 +851,15 @@ RSpec.describe Member, feature_category: :groups_and_projects do
end
end
describe '.including_user_ids' do
let_it_be(:active_group_member) { create(:group_member, group: group) }
it 'includes members with given user ids' do
expect(group.members.including_user_ids(active_group_member.user_id)).to include active_group_member
expect(group.members.including_user_ids(non_existing_record_id)).to be_empty
end
end
describe '.excluding_users' do
let_it_be(:active_group_member) { create(:group_member, group: group) }

View File

@ -0,0 +1,415 @@
# frozen_string_literal: true
require 'fast_spec_helper'
require 'tempfile'
require_relative '../../../scripts/setup/tests-metadata'
# rubocop:disable Gitlab/Json, Lint/MissingCopEnableDirective -- It's not intended to have extra dependency
RSpec.describe TestsMetadata, feature_category: :tooling do # rubocop:disable Rails/FilePath,RSpec/SpecFilePathFormat -- We use dashes in scripts
# Builds the object under test from CLI-style options.
subject(:metadata) do
  described_class.new(
    mode: mode,
    knapsack_report_path: knapsack_report_path,
    flaky_report_path: flaky_report_path,
    fast_quarantine_path: fast_quarantine_path,
    average_knapsack: average_knapsack)
end
# Default report locations; file-touching examples override them with
# tempfiles via the 'with fake reports' shared context.
let(:average_knapsack) { true }
let(:knapsack_report_path) { 'knapsack_report/path' }
let(:flaky_report_path) { 'flaky_report/path' }
let(:fast_quarantine_path) { 'fast_quarantine/path' }
# Sentinel raised by stubbed `abort` so examples observe the failure path
# without exiting the spec process.
let(:aborted) { StandardError.new }
# Each mode triggers a distinct subset of the retrieve/update/verify steps.
describe '#main' do
  context 'when mode is retrieve' do
    let(:mode) { 'retrieve' }
    it 'calls prepare_directories and retrieve' do
      expect(metadata).to receive(:prepare_directories)
      expect(metadata).to receive(:retrieve)
      expect(metadata).not_to receive(:update)
      expect(metadata).not_to receive(:verify)
      metadata.main
    end
  end
  context 'when mode is update' do
    let(:mode) { 'update' }
    it 'calls prepare_directories and retrieve and update' do
      expect(metadata).to receive(:prepare_directories)
      expect(metadata).to receive(:retrieve)
      expect(metadata).to receive(:update)
      expect(metadata).not_to receive(:verify)
      metadata.main
    end
  end
  context 'when mode is verify' do
    let(:mode) { 'verify' }
    # Fixed copy-pasted description: this example asserts verify-only behavior.
    it 'calls verify only' do
      expect(metadata).not_to receive(:prepare_directories)
      expect(metadata).not_to receive(:retrieve)
      expect(metadata).not_to receive(:update)
      expect(metadata).to receive(:verify)
      metadata.main
    end
  end
end
describe '#prepare_directories' do
  let(:mode) { 'retrieve' }
  it 'prepares the directories' do
    # All three parent directories are created in a single mkdir_p call.
    expect(FileUtils).to receive(:mkdir_p).with([
      File.dirname(knapsack_report_path),
      File.dirname(flaky_report_path),
      File.dirname(fast_quarantine_path)
    ])
    metadata.__send__(:prepare_directories)
  end
end
# Replaces the report paths with real tempfiles pre-seeded with content, so
# examples can exercise file reads/writes without touching the repository.
shared_context 'with fake reports' do
  let(:knapsack_report_path) { knapsack_report_file.path }
  let(:flaky_report_path) { flaky_report_file.path }
  let(:fast_quarantine_path) { fast_quarantine_file.path }
  let(:knapsack_report_file) { tempfile_write('knapsack', knapsack_report) }
  let(:flaky_report_file) { tempfile_write('flaky', flaky_report) }
  let(:fast_quarantine_file) { tempfile_write('fast_quarantine', fast_quarantine_report) }
  let(:knapsack_report) { json_report }
  let(:flaky_report) { json_report }
  let(:fast_quarantine_report) { text_report }
  let(:json_report) { '{"valid":"json"}' }
  let(:text_report) { 'This is an apple' }
  after do
    [knapsack_report_file, flaky_report_file, fast_quarantine_file]
      .each(&:unlink)
  end
  # Writes `content` to a closed Tempfile and returns the file handle.
  def tempfile_write(path, content)
    file = Tempfile.new(path)
    file.write(content)
    file.close
    file
  end
end
describe '#retrieve' do
  include_context 'with fake reports'
  let(:mode) { 'retrieve' }
  # Toggles for the stubbed curl invocations set up in expect_system_curl.
  let(:expect_curl) { true }
  let(:curl_knapsack_return) { true }
  let(:curl_flaky_report_return) { true }
  let(:curl_fast_quarantine_return) { true }
  before do
    expect_system_curl
  end
  # Stubs (or forbids, when expect_curl is false) the three curl downloads.
  def expect_system_curl
    expect_system_curl_with(%W[
      curl --fail --location -o #{knapsack_report_path} https://gitlab-org.gitlab.io/gitlab/#{knapsack_report_path}
    ], curl_knapsack_return)
    expect_system_curl_with(%W[
      curl --fail --location -o #{flaky_report_path} https://gitlab-org.gitlab.io/gitlab/#{flaky_report_path}
    ], curl_flaky_report_return)
    expect_system_curl_with(%W[
      curl --fail --location -o #{fast_quarantine_path} https://gitlab-org.gitlab.io/quality/engineering-productivity/fast-quarantine/#{fast_quarantine_path}
    ], curl_fast_quarantine_return)
  end
  # Sets a positive or negative message expectation on `system`, returning
  # `curl_return` in the positive case.
  def expect_system_curl_with(arguments, curl_return)
    to =
      if expect_curl
        :to
      else
        :not_to
      end
    expectation =
      expect(metadata).public_send(to, receive(:system)).with(*arguments) # rubocop:disable RSpec/MissingExpectationTargetMethod -- it's dynamic
    expectation.and_return(curl_return) if expect_curl
  end
  it 'downloads the metadata and parse it respectively' do
    metadata.__send__(:retrieve)
    expect(File.read(knapsack_report_path)).to eq(json_report)
    expect(File.read(flaky_report_path)).to eq(json_report)
    expect(File.read(fast_quarantine_path)).to eq(text_report)
  end
  context 'when JSON report we download is invalid' do
    let(:json_report) { 'This is a bad JSON' }
    it 'writes a fallback JSON file instead of using invalid JSON' do
      metadata.__send__(:retrieve)
      expect(File.read(knapsack_report_path)).to eq(described_class::FALLBACK_JSON)
      expect(File.read(flaky_report_path)).to eq(described_class::FALLBACK_JSON)
      # The fast-quarantine file is plain text, so it is left untouched.
      expect(File.read(fast_quarantine_path)).to eq(text_report)
    end
  end
  context 'when fast quarantine report failed to download' do
    let(:curl_fast_quarantine_return) { false }
    it 'writes a fallback file with fallback content' do
      metadata.__send__(:retrieve)
      expect(File.read(fast_quarantine_path)).to eq('')
    end
  end
  context 'when it is update mode' do
    let(:mode) { 'update' }
    let(:expect_curl) { false }
    # The tempfiles already exist, so update mode must skip all downloads.
    it 'does not download tests metadata via curl' do # rubocop:disable RSpec/NoExpectationExample -- set in before already, see expect_system_curl_with
      metadata.__send__(:retrieve)
    end
  end
end
describe '#update' do
  let(:mode) { 'update' }
  it 'updates all reports' do
    # Only asserts that all three steps run, not their (significant) order.
    expect(metadata).to receive(:update_knapsack_report)
    expect(metadata).to receive(:update_flaky_report)
    expect(metadata).to receive(:prune_flaky_report)
    metadata.__send__(:update)
  end
end
describe '#update_knapsack_report' do
  include_context 'with fake reports'
  let(:mode) { 'update' }
  let(:knapsack_report_dir) { File.dirname(knapsack_report_path) }
  let(:individual_knapsack_reports) do
    %W[
      #{knapsack_report_dir}/rspec-0.json
      #{knapsack_report_dir}/rspec-1.json
    ]
  end
  before do
    # Stub the glob so no per-job report files need to exist on disk.
    allow(Dir).to receive(:[]).with("#{knapsack_report_dir}/rspec*.json")
      .and_return(individual_knapsack_reports)
  end
  it 'updates knapsack report' do
    expect(metadata).to receive(:system).with(
      'scripts/pipeline/average_reports.rb',
      '-i', knapsack_report_path,
      '-n', individual_knapsack_reports.join(',')
    ).and_return(true)
    metadata.__send__(:update_knapsack_report)
  end
  context 'when scripts/pipeline/average_reports.rb failed' do
    it 'aborts the process' do
      expect(metadata).to receive(:system).with(
        'scripts/pipeline/average_reports.rb',
        '-i', knapsack_report_path,
        '-n', individual_knapsack_reports.join(',')
      ).and_return(false)
      expect(metadata).to receive(:abort)
      metadata.__send__(:update_knapsack_report)
    end
  end
  context 'when average_knapsack is false' do
    let(:average_knapsack) { false }
    # Note: the merged paths are passed as one space-joined argv element.
    it 'uses scripts/merge-reports to merge reports instead' do
      expect(metadata).to receive(:system).with(
        'scripts/merge-reports',
        knapsack_report_path,
        individual_knapsack_reports.join(' ')
      ).and_return(true)
      metadata.__send__(:update_knapsack_report)
    end
    context 'when scripts/merge-reports failed' do
      it 'aborts the process' do
        expect(metadata).to receive(:system).with(
          'scripts/merge-reports',
          knapsack_report_path,
          individual_knapsack_reports.join(' ')
        ).and_return(false)
        expect(metadata).to receive(:abort)
        metadata.__send__(:update_knapsack_report)
      end
    end
  end
end
describe '#update_flaky_report' do
  include_context 'with fake reports'
  let(:mode) { 'update' }
  let(:flaky_report_dir) { File.dirname(flaky_report_path) }
  let(:individual_flaky_reports) do
    %W[
      #{flaky_report_dir}/all_0.json
      #{flaky_report_dir}/all_1.json
    ]
  end
  before do
    # Stub the glob so no per-job report files need to exist on disk.
    allow(Dir).to receive(:[]).with("#{flaky_report_dir}/all_*.json")
      .and_return(individual_flaky_reports)
  end
  it 'updates flaky report' do
    expect(metadata).to receive(:system).with(
      'scripts/merge-reports',
      flaky_report_path,
      individual_flaky_reports.join(' ')
    ).and_return(true)
    metadata.__send__(:update_flaky_report)
  end
  context 'when scripts/merge-reports failed' do
    it 'aborts the process' do
      expect(metadata).to receive(:system).with(
        'scripts/merge-reports',
        flaky_report_path,
        individual_flaky_reports.join(' ')
      ).and_return(false)
      # `abort` raises the sentinel so the example observes the failure path
      # without exiting the spec process.
      expect(metadata).to receive(:abort).and_raise(aborted)
      expect do
        metadata.__send__(:update_flaky_report)
      end.to raise_error(aborted)
    end
  end
end
describe '#prune_flaky_report' do
  include_context 'with fake reports'
  let(:mode) { 'update' }
  let(:flaky_report_dir) { File.dirname(flaky_report_path) }
  it 'prunes flaky report' do
    expect(metadata).to receive(:system).with(
      'scripts/flaky_examples/prune-old-flaky-examples',
      flaky_report_path
    ).and_return(true)
    metadata.__send__(:prune_flaky_report)
  end
  context 'when scripts/flaky_examples/prune-old-flaky-examples failed' do
    it 'aborts the process' do
      expect(metadata).to receive(:system).with(
        'scripts/flaky_examples/prune-old-flaky-examples',
        flaky_report_path
      ).and_return(false)
      expect(metadata).to receive(:abort)
      metadata.__send__(:prune_flaky_report)
    end
  end
end
describe '#verify' do
include_context 'with fake reports'
shared_examples 'fail verification and abort' do
it 'calls abort to fail the verification' do
expect(metadata).to receive(:abort).and_raise(aborted)
expect do
metadata.__send__(:verify)
end.to raise_error(aborted)
end
end
let(:mode) { 'verify' }
let(:knapsack_report) { JSON.dump({ __FILE__ => 123.456 }) }
let(:flaky_report) do
<<~JSON
{
"fa1659e83e918bab8cb80518ea5f80f4": {
"first_flaky_at": "2023-12-07 14:21:34 +0000",
"last_flaky_at": "2024-09-02 22:18:36 +0000",
"last_flaky_job": "https://gitlab.com/gitlab-org/gitlab/-/jobs/7724274859",
"last_attempts_count": 2,
"flaky_reports": 954,
"feature_category": "integrations",
"example_id": "./spec/features/projects/integrations/user_activates_issue_tracker_spec.rb[1:4:1:2:1]",
"file": "./spec/features/projects/integrations/user_activates_issue_tracker_spec.rb",
"line": 49,
"description": "User activates issue tracker behaves like external issue tracker activation user sets and activates the integration when the connection test fails activates the integration"
}
}
JSON
end
let(:fast_quarantine_report) do
<<~TEXT
qa/specs/features/ee/browser_ui/3_create/remote_development/workspace_actions_spec.rb
spec/features/projects/work_items/work_item_spec.rb:67
TEXT
end
it 'validates reports are valid' do
expect { metadata.__send__(:verify) }.to output("OK\n").to_stdout
end
context 'when knapsack report has type error' do
let(:knapsack_report) { JSON.dump({ __FILE__ => '123.456' }) }
it_behaves_like 'fail verification and abort'
end
context 'when knapsack report is not valid JSON' do
let(:knapsack_report) { { __FILE__ => 123.456 }.to_s }
it_behaves_like 'fail verification and abort'
end
context 'when flaky report is not valid JSON' do
let(:flaky_report) { 'This is an apple' }
it_behaves_like 'fail verification and abort'
end
context 'when fast quarantine report is not valid',
skip: 'This is not possible because it is always considered valid'
end
end