Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent c7454acb66
commit a9ae4a7594
@@ -1 +1 @@
-89bb339a5a7e5b95b5595d35958cc3c7607bde3c
+3ab6dc78bf07f1ef5dd697552261756b1e9f97a9
@@ -258,30 +258,22 @@ class ContainerRepository < ApplicationRecord

   def set_delete_ongoing_status
     now = Time.zone.now
-    values = {
+    update_columns(
       status: :delete_ongoing,
       delete_started_at: now,
-      status_updated_at: now
-    }
-
-    values[:next_delete_attempt_at] = nil if Feature.enabled?(:set_delete_failed_container_repository, project)
-
-    update_columns(values)
+      status_updated_at: now,
+      next_delete_attempt_at: nil
+    )
   end

   def set_delete_scheduled_status
-    values = {
+    update_columns(
       status: :delete_scheduled,
       delete_started_at: nil,
-      status_updated_at: Time.zone.now
-    }
-
-    if Feature.enabled?(:set_delete_failed_container_repository, project)
-      values[:failed_deletion_count] = failed_deletion_count + 1
-      values[:next_delete_attempt_at] = next_delete_attempt_with_delay
-    end
-
-    update_columns(values)
+      status_updated_at: Time.zone.now,
+      failed_deletion_count: failed_deletion_count + 1,
+      next_delete_attempt_at: next_delete_attempt_with_delay
+    )
   end

   def set_delete_failed_status
@@ -53,8 +53,7 @@ module ContainerRegistry
     def update_next_container_repository_status
       return unless next_container_repository

-      if next_container_repository.failed_deletion_count >= ContainerRepository::MAX_DELETION_FAILURES &&
-          Feature.enabled?(:set_delete_failed_container_repository, next_container_repository.project)
+      if next_container_repository.failed_deletion_count >= ContainerRepository::MAX_DELETION_FAILURES
         next_container_repository.set_delete_failed_status
       else
         next_container_repository.set_delete_scheduled_status
@@ -1,9 +0,0 @@
----
-name: set_delete_failed_container_repository
-feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/480652
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/166119
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/490354
-milestone: '17.5'
-group: group::container registry
-type: gitlab_com_derisk
-default_enabled: false
@@ -0,0 +1,8 @@
+---
+migration_job_name: DeleteOrphanedPipelineVariableRecords
+description: Deletes corrupted rows from p_ci_pipeline_variables table
+feature_category: continuous_integration
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169441
+milestone: '17.6'
+queued_migration_version: 20241016131601
+finalized_by: # version of the migration that finalized this BBM
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class QueueDeleteOrphanedPipelineVariableRecords < Gitlab::Database::Migration[2.2]
+  milestone '17.6'
+  restrict_gitlab_migration gitlab_schema: :gitlab_ci
+
+  MIGRATION = "DeleteOrphanedPipelineVariableRecords"
+  DELAY_INTERVAL = 2.minutes
+  BATCH_SIZE = 1000
+  SUB_BATCH_SIZE = 100
+
+  def up
+    queue_batched_background_migration(
+      MIGRATION,
+      :p_ci_pipeline_variables,
+      :pipeline_id,
+      job_interval: DELAY_INTERVAL,
+      batch_size: BATCH_SIZE,
+      batch_class_name: 'LooseIndexScanBatchingStrategy',
+      sub_batch_size: SUB_BATCH_SIZE
+    )
+  end
+
+  def down
+    delete_batched_background_migration(MIGRATION, :p_ci_pipeline_variables, :pipeline_id, [])
+  end
+end
@@ -0,0 +1 @@
+41127c2fcc61d93b91ac88a9f21c40ed503b18a25140c7ef6a30ec37d83f1f54
@@ -36,18 +36,17 @@ This access can also [be restricted](#limit-job-token-scope-for-public-or-intern
 | [Deployments API](../../api/deployments.md) | `GET` requests are public by default. |
 | [Environments API](../../api/environments.md) | `GET` requests are public by default. |
 | [Job artifacts API](../../api/job_artifacts.md#get-job-artifacts) | `GET` requests are public by default. |
-| [Jobs API](../../api/jobs.md#get-job-tokens-job) | To get the job token's job. |
+| [API endpoint to get the job of a job token](../../api/jobs.md#get-job-tokens-job) | To get the job token's job. |
 | [Package registry](../../user/packages/package_registry/index.md#to-build-packages) | |
 | [Packages API](../../api/packages.md) | `GET` requests are public by default. |
 | [Pipeline triggers](../../api/pipeline_triggers.md) | Used with the `token=` parameter to [trigger a multi-project pipeline](../pipelines/downstream_pipelines.md#trigger-a-multi-project-pipeline-by-using-the-api). |
-| [Pipelines API](../../api/pipelines.md#update-pipeline-metadata) | To update pipeline metadata. |
+| [Update pipeline metadata API endpoint](../../api/pipelines.md#update-pipeline-metadata) | To update pipeline metadata. |
 | [Release links API](../../api/releases/links.md) | |
 | [Releases API](../../api/releases/index.md) | `GET` requests are public by default. |
 | [Secure files](../secure_files/index.md#use-secure-files-in-cicd-jobs) | The `download-secure-files` tool authenticates with a CI/CD job token by default. |
 | [Terraform plan](../../user/infrastructure/index.md) | |

-A job token can access a project's resources without any configuration, but it might
-give extra permissions that aren't necessary. There is [a proposal](https://gitlab.com/groups/gitlab-org/-/epics/3559)
+Other API endpoints are not accessible using a job token. There is [a proposal](https://gitlab.com/groups/gitlab-org/-/epics/3559)
 to redesign the feature for more granular control of access permissions.

 ## GitLab CI/CD job token security
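The Pipeline triggers row in the table above passes the job token as the `token=` parameter. As a minimal sketch of that pattern (the job name, `deploy` stage, downstream project ID `123`, and `ref=main` are placeholder assumptions, not values from this change), a trigger job might look like:

```yaml
# Hypothetical downstream trigger job: the CI/CD job token is sent as the
# token= form field of the pipeline trigger API. Project ID 123 and ref main
# are placeholders.
trigger_downstream:
  stage: deploy
  script:
    - curl --fail --request POST --form "token=$CI_JOB_TOKEN" --form "ref=main" "${CI_API_V4_URL}/projects/123/trigger/pipeline"
```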
@@ -41,44 +41,122 @@ ensure coverage for all of these dependency types. To cover as much of your risk
 we encourage you to use all of our security scanners. For a comparison of these features, see
 [Dependency Scanning compared to Container Scanning](../../comparison_dependency_and_container_scanning.md).

-## Supported package managers
+## Supported package types

-For a list of supported package managers, see the analyzer's
-[supported files](https://gitlab.com/gitlab-org/security-products/analyzers/dependency-scanning/#supported-files).
+The vulnerability scanning of SBOM files is performed in GitLab by the same scanner used by
+[Continuous Vulnerability Scanning](../../continuous_vulnerability_scanning/index.md).
+In order for security scanning to work for your package manager, advisory information must be
+available for the components present in the SBOM report.
+
+See [Supported package types](../../continuous_vulnerability_scanning/index.md#supported-package-types).

 ## Dependency detection workflow

 The dependency detection workflow is as follows:

-1. The application to be scanned provides a CycloneDX SBOM file or creates one.
+1. The application to be scanned provides a
+   [CycloneDX SBOM report](../../../../ci/yaml/artifacts_reports.md#artifactsreportscyclonedx)
+   or creates one by [enabling the GitLab Dependency Scanning analyzer](#enabling-the-analyzer).
 1. GitLab checks each of the dependencies listed in the SBOM against the GitLab Advisory Database.
-1. If the dependency scanning job is run on the default branch: vulnerabilities are created, and can be seen in the vulnerability report.
+1. If the SBOM report is declared by a CI/CD job on the default branch: vulnerabilities are created,
+   and can be seen in the vulnerability report.

-If the dependency scanning job is run on a non-default branch: security findings are created, and can be seen in the pipeline security tab and MR security widget.
+If the SBOM report is declared by a CI/CD job on a non-default branch: no vulnerability
+scanning takes place. Improvement to the feature is being tracked in
+[Epic 14636](https://gitlab.com/groups/gitlab-org/-/epics/14636) so that security findings are
+created, and can be seen in the pipeline security tab and MR security widget.

 ## Configuration

-Enable the dependency scanning analyzer to ensure it scans your application's dependencies for known vulnerabilities.
-You can then adjust its behavior by configuring the CI/CD component's inputs.
+- Enable the dependency scanning analyzer to generate a CycloneDX SBOM containing your
+  application's dependencies. Once this report is uploaded to GitLab, the dependencies are scanned
+  for known vulnerabilities.
+- You can adjust the analyzer behavior by configuring the CI/CD component's inputs.
+
+For a list of languages and package managers supported by the analyzer, see
+[supported files](https://gitlab.com/gitlab-org/security-products/analyzers/dependency-scanning/#supported-files).
+
+After a
+[CycloneDX SBOM report](../../../../ci/yaml/artifacts_reports.md#artifactsreportscyclonedx)
+is uploaded, GitLab automatically scans all
+[supported package types](../../continuous_vulnerability_scanning/index.md#supported-package-types)
+present in the report.

 ## Enabling the analyzer

+The Dependency Scanning analyzer produces a CycloneDX SBOM report compatible with GitLab. If your
+application can't generate such a report, you can use the GitLab analyzer to produce one.
+
 Prerequisites:

+- A [supported lock file or dependency graph](https://gitlab.com/gitlab-org/security-products/analyzers/dependency-scanning/#supported-files)
+  must exist in the repository or must be passed as an artifact to the `dependency-scanning` job.
 - The component's [stage](https://gitlab.com/explore/catalog/components/dependency-scanning) is required in the `.gitlab-ci.yml` file.
 - With self-managed runners you need a GitLab Runner with the
   [`docker`](https://docs.gitlab.com/runner/executors/docker.html) or
   [`kubernetes`](https://docs.gitlab.com/runner/install/kubernetes.html) executor.
 - If you're using SaaS runners on GitLab.com, this is enabled by default.
-- A [supported lock file or dependency graph](https://gitlab.com/gitlab-org/security-products/analyzers/dependency-scanning/#supported-files)
-  must be in the repository.
-  Alternatively, configure the CI/CD job to output either as a job artifact,
-  ensuring the artifacts are generated in a stage before the `dependency-scanning`
-  job's stage. See the following example.

-To enable the analyzer, use the `main` [dependency scanning CI/CD component](https://gitlab.com/explore/catalog/components/dependency-scanning).
+To enable the analyzer, use the `main` [dependency scanning CI/CD component](https://gitlab.com/explore/catalog/components/dependency-scanning):

-### Enabling the analyzer for a Maven project
+```yaml
+include:
+  - component: $CI_SERVER_FQDN/components/dependency-scanning/main@0
+```
+
+### Language-specific instructions
+
+If your project doesn't have a supported lock file or dependency graph committed to its
+repository, you need to provide one.
+
+The examples below show how to create a file that is supported by the GitLab analyzer for popular
+languages and package managers.
+
+#### Gradle
+
+To enable the CI/CD component on a Gradle project:
+
+1. Edit the `build.gradle` or `build.gradle.kts` to use the
+   [gradle-dependency-lock-plugin](https://github.com/nebula-plugins/gradle-dependency-lock-plugin/wiki/Usage#example).
+1. Configure the `.gitlab-ci.yml` file to generate the `dependencies.lock` artifacts, and pass them
+   to the `dependency-scanning` job.
+
+The following example demonstrates how to configure the component
+for a Gradle project.
+
+```yaml
+stages:
+  - build
+  - test
+
+# Define the image that contains Java and Gradle
+image: gradle:8.0-jdk11
+
+include:
+  - component: $CI_SERVER_FQDN/components/dependency-scanning/main@0
+
+build:
+  # Running in the build stage ensures that the dependency-scanning job
+  # receives the maven.graph.json artifacts.
+  stage: build
+  script:
+    - gradle generateLock saveLock
+    - gradle assemble
+  # generateLock saves the lock file in the build/ directory of a project
+  # and saveLock copies it into the root of a project. To avoid duplicates
+  # and get an accurate location of the dependency, use find to remove the
+  # lock files in the build/ directory only.
+  after_script:
+    - find . -path '*/build/dependencies.lock' -print -delete
+  # Collect all dependencies.lock artifacts and pass them onto jobs
+  # in sequential stages.
+  artifacts:
+    paths:
+      - "**/dependencies.lock"
+
+```
+
+#### Maven

 The following example `.gitlab-ci.yml` demonstrates how to enable the CI/CD
 component on a Maven project. The dependency graph is output as a job artifact
@@ -110,48 +188,6 @@ build:
 ```

-### Enabling the analyzer for a Gradle project
-
-To enable the CI/CD component on a Gradle project:
-
-1. Edit the `build.gradle` or `build.gradle.kts` to use the [gradle-dependency-lock-plugin](https://github.com/nebula-plugins/gradle-dependency-lock-plugin/wiki/Usage#example).
-1. Configure the `.gitlab-ci.yml` file to generate the `dependencies.lock` artifacts, and pass them to the `dependency-scanning` job.
-
-The following example demonstrates how to configure the component
-for a Gradle project.
-
-```yaml
-stages:
-  - build
-  - test
-
-# Define the image that contains Java and Gradle
-image: gradle:8.0-jdk11
-
-include:
-  - component: $CI_SERVER_FQDN/components/dependency-scanning/main@0.4.0
-
-build:
-  # Running in the build stage ensures that the dependency-scanning job
-  # receives the maven.graph.json artifacts.
-  stage: build
-  script:
-    - gradle generateLock saveLock
-    - gradle assemble
-  # generateLock saves the lock file in the build/ directory of a project
-  # and saveLock copies it into the root of a project. To avoid duplicates
-  # and get an accurate location of the dependency, use find to remove the
-  # lock files in the build/ directory only.
-  after_script:
-    - find . -path '*/build/dependencies.lock' -print -delete
-  # Collect all dependencies.lock artifacts and pass them onto jobs
-  # in sequential stages.
-  artifacts:
-    paths:
-      - "**/dependencies.lock"
-
-```
-
 ## Customizing analyzer behavior

 The analyzer can be customized by configuring the CI/CD component's
@@ -222,7 +258,7 @@ stages:
   - merge-cyclonedx-sboms

 include:
-  - component: $CI_SERVER_FQDN/components/dependency-scanning/main@0.4.0
+  - component: $CI_SERVER_FQDN/components/dependency-scanning/main@0

 merge cyclonedx sboms:
   stage: merge-cyclonedx-sboms
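The dependency detection workflow shown above starts from a CycloneDX SBOM report declared by a CI/CD job. As a minimal sketch of that first step (the job name, stage, script, and report file name are placeholder assumptions), a job that already produces its own SBOM can hand it to GitLab through `artifacts:reports:cyclonedx`:

```yaml
# Hypothetical job that publishes an existing CycloneDX SBOM for scanning.
# The job name, script, and file name are placeholders.
generate-sbom:
  stage: test
  script:
    - ./generate-sbom.sh  # assumed to write gl-sbom-report.cdx.json
  artifacts:
    reports:
      cyclonedx: gl-sbom-report.cdx.json
```

Projects that cannot produce such a report can rely on the `dependency-scanning` component shown above instead.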
@@ -0,0 +1,71 @@
+---
+stage: Secure
+group: Static Analysis
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+---
+
+# Evaluate GitLab SAST
+
+DETAILS:
+**Tier:** Ultimate
+**Offering:** GitLab.com, Self-managed, GitLab Dedicated
+
+You might choose to evaluate GitLab SAST before using it in your organization.
+Consider the following guidance as you plan and conduct your evaluation.
+
+## Choose a test codebase
+
+When choosing a codebase to test SAST, you should:
+
+- Test in a repository where you can safely modify the CI/CD configuration without getting in the way of normal development activities.
+  SAST scans run in your CI/CD pipeline, so you'll need to make a small edit to the CI/CD configuration to [enable SAST](index.md#configuration).
+- You can make a fork or copy of an existing repository for testing. This way, you can set up your testing environment without any chance of interrupting normal development.
+- Use a codebase that matches your organization's typical technology stack.
+- Use a language that [Advanced SAST supports](gitlab_advanced_sast.md#supported-languages).
+  Advanced SAST produces more accurate results than other [analyzers](analyzers.md).
+
+Your test project must have GitLab Ultimate. Only Ultimate includes [features](index.md#features) like:
+
+- Proprietary cross-file, cross-function scanning with GitLab Advanced SAST.
+- The merge request widget, pipeline security report, and default-branch vulnerability report that make scan results visible and actionable.
+
+### Benchmarks and example projects
+
+If you choose to use a benchmark or an intentionally vulnerable application for testing, remember that these applications:
+
+- Focus on specific vulnerability types.
+  The benchmark's focus may be different from the vulnerability types your organization prioritizes for discovery and remediation.
+- Use specific technologies in specific ways that may differ from how your organization builds software.
+- Report results in ways that may implicitly emphasize certain criteria over others.
+  For example, you may prioritize precision (fewer false-positive results) while the benchmark only scores based on recall (fewer false-negative results).
+
+[Epic 15296](https://gitlab.com/groups/gitlab-org/-/epics/15296) tracks work to recommend specific projects for testing.
+
+### AI-generated test code
+
+You should not use AI tools to create vulnerable code for testing SAST.
+AI models often return code that is not truly exploitable.
+
+For example:
+
+- AI tools often write small functions that take a parameter and use it in a sensitive context (called a "sink"), without actually receiving any user input.
+  This can be a safe design if the function is only called with program-controlled values, like constants.
+  The code is not vulnerable unless user input is allowed to flow to these sinks without first being sanitized or validated.
+- AI tools may comment out part of the vulnerability to prevent you from accidentally running the code.
+
+Reporting vulnerabilities in these unrealistic examples would cause false-positive results in real-world code.
+GitLab SAST is not designed to report vulnerabilities in these cases.
+
+## Conduct the test
+
+After you choose a codebase to test with, you're ready to conduct the test. You can follow these steps:
+
+1. [Enable SAST](index.md#configuration) by creating a merge request (MR) that adds SAST to the CI/CD configuration.
+   - Be sure to set the CI/CD variable to [enable Advanced SAST](gitlab_advanced_sast.md#enable-advanced-sast-scanning) for more accurate results.
+1. Merge the MR to the repository's default branch.
+1. Open the [Vulnerability Report](../vulnerability_report/index.md) to see the vulnerabilities found on the default branch.
+   - If you're using Advanced SAST, you can use the [Tool filter](../vulnerability_report/index.md#tool-filter) to show results only from that scanner.
+1. Review vulnerability results.
+   - Check the [code flow view](../vulnerabilities/index.md#vulnerability-code-flow) for Advanced SAST vulnerabilities that involve tainted user input, like SQL injection or path traversal.
+   - If you have GitLab Duo Enterprise, [explain](../vulnerabilities/index.md#explaining-a-vulnerability) or [resolve](../vulnerabilities/index.md#resolve-a-vulnerability) a vulnerability.
+1. To see how scanning works as new code is developed, create a new merge request that changes application code and adds a new vulnerability or weakness.
@@ -84,7 +84,7 @@ On GitLab self-managed, you must also use a GitLab version that supports Advance
 - The latest template includes Advanced SAST in GitLab 17.2 or later. Note that you [should not mix latest and stable templates](../index.md#template-editions) in a single project.
 - At a minimum, GitLab Advanced SAST requires version 17.1 or later.

-### Enabling Advanced SAST scanning
+### Enable Advanced SAST scanning

 Advanced SAST is included in the standard GitLab SAST CI/CD template, but isn't yet enabled by default.
 To enable it, set the CI/CD variable `GITLAB_ADVANCED_SAST_ENABLED` to `true`.
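As a minimal sketch of the setting described above (assuming the project includes a stock SAST template; `Jobs/SAST.gitlab-ci.yml` is used here as an example, substitute whichever SAST template edition the project already uses):

```yaml
# Minimal sketch: include a SAST template and opt in to Advanced SAST.
include:
  - template: Jobs/SAST.gitlab-ci.yml

variables:
  GITLAB_ADVANCED_SAST_ENABLED: 'true'
```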
@@ -16,6 +16,15 @@ It automatically chooses which analyzers to run based on which programming langu
 Each analyzer processes the code, then uses rules to find possible weaknesses in source code.
 The analyzer's rules determine what types of weaknesses it reports.

+## Scope of rules
+
+GitLab SAST focuses on security weaknesses and vulnerabilities. It does not aim to find general bugs or assess overall code quality or maintainability.
+
+GitLab manages the detection ruleset with a focus on identifying actionable security weaknesses and vulnerabilities.
+The ruleset is designed to provide broad coverage against the most impactful vulnerabilities while minimizing false positives (reported vulnerabilities where no vulnerability exists).
+
+GitLab SAST is designed to be used in its default configuration, but you can [configure detection rules](#configure-rules-in-your-projects) if needed.
+
 ## Source of rules

 ### Advanced SAST
@@ -2,6 +2,7 @@
 stage: AI-powered
 group: AI Model Validation
 info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
+ignore_in_report: true
 ---

 # Suggested Reviewers data usage
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module Gitlab
+  module BackgroundMigration
+    class DeleteOrphanedPipelineVariableRecords < BatchedMigrationJob
+      operation_name :delete_orphaned_pipeline_variable_records
+      feature_category :continuous_integration
+
+      class CiPipeline < ::Ci::ApplicationRecord
+        self.table_name = :p_ci_pipelines
+        self.primary_key = :id
+      end
+
+      def perform
+        distinct_each_batch do |batch|
+          pipeline_ids = batch.pluck(batch_column)
+          pipelines_query = CiPipeline
+            .where('p_ci_pipeline_variables.pipeline_id = p_ci_pipelines.id')
+            .where('p_ci_pipeline_variables.partition_id = p_ci_pipelines.partition_id')
+            .select(1)
+
+          base_relation
+            .where(batch_column => pipeline_ids)
+            .where('NOT EXISTS (?)', pipelines_query)
+            .delete_all
+        end
+      end
+
+      private
+
+      def base_relation
+        define_batchable_model(batch_table, connection: connection, primary_key: :id)
+          .where(batch_column => start_id..end_id)
+      end
+    end
+  end
+end
@@ -3,8 +3,8 @@ import { nextTick } from 'vue';
 import { scrollToElement } from '~/lib/utils/common_utils';
 import FormCustomHeaders from '~/webhooks/components/form_custom_headers.vue';
 import FormCustomHeaderItem from '~/webhooks/components/form_custom_header_item.vue';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
+import CrudComponent from '~/vue_shared/components/crud_component.vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
 import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';

 jest.mock('~/lib/utils/common_utils');
@@ -24,8 +24,11 @@ describe('FormCustomHeaders', () => {
   });

   const createComponent = ({ props } = {}) => {
-    wrapper = mountExtended(FormCustomHeaders, {
+    wrapper = shallowMountExtended(FormCustomHeaders, {
       propsData: props,
+      stubs: {
+        CrudComponent,
+      },
     });
   };

@@ -49,7 +52,7 @@ describe('FormCustomHeaders', () => {
       expect(findAllCustomHeaderItems()).toHaveLength(3);

-      const lastItem = findAllCustomHeaderItems().at(-1);
+      const lastItem = findAllCustomHeaderItems().at(2);
       expect(lastItem.props()).toMatchObject({
         headerKey: '',
         headerValue: '',
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedPipelineVariableRecords,
+  feature_category: :continuous_integration, migration: :gitlab_ci do
+  let(:pipelines_table) { table(:p_ci_pipelines, database: :ci, primary_key: :id) }
+  let(:variables_table) { table(:p_ci_pipeline_variables, database: :ci, primary_key: :id) }
+
+  let(:default_attributes) { { project_id: 600, partition_id: 100 } }
+  let!(:regular_pipeline) { pipelines_table.create!(default_attributes) }
+  let!(:deleted_pipeline) { pipelines_table.create!(default_attributes) }
+  let!(:other_pipeline) { pipelines_table.create!(default_attributes) }
+
+  let!(:regular_variable) do
+    variables_table.create!(pipeline_id: regular_pipeline.id, key: :key1, **default_attributes)
+  end
+
+  let!(:orphaned_variable) do
+    variables_table.create!(pipeline_id: deleted_pipeline.id, key: :key2, **default_attributes)
+  end
+
+  let(:connection) { Ci::ApplicationRecord.connection }
+
+  around do |example|
+    connection.transaction do
+      connection.execute(<<~SQL)
+        ALTER TABLE ci_pipelines DISABLE TRIGGER ALL;
+      SQL
+
+      example.run
+
+      connection.execute(<<~SQL)
+        ALTER TABLE ci_pipelines ENABLE TRIGGER ALL;
+      SQL
+    end
+  end
+
+  describe '#perform' do
+    subject(:migration) do
+      described_class.new(
+        start_id: variables_table.minimum(:pipeline_id),
+        end_id: variables_table.maximum(:pipeline_id),
+        batch_table: :p_ci_pipeline_variables,
+        batch_column: :pipeline_id,
+        sub_batch_size: 100,
+        pause_ms: 0,
+        connection: connection
+      )
+    end
+
+    it 'deletes from p_ci_pipeline_variables where pipeline_id has no related', :aggregate_failures do
+      expect { deleted_pipeline.delete }.to not_change { variables_table.count }
+
+      expect { migration.perform }.to change { variables_table.count }.from(2).to(1)
+
+      expect(regular_variable.reload).to be_persisted
+      expect { orphaned_variable.reload }.to raise_error(ActiveRecord::RecordNotFound)
+    end
+  end
+end
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueDeleteOrphanedPipelineVariableRecords, migration: :gitlab_ci, feature_category: :continuous_integration do
+  let!(:batched_migration) { described_class::MIGRATION }
+
+  it 'schedules a new batched migration' do
+    reversible_migration do |migration|
+      migration.before -> {
+        expect(batched_migration).not_to have_scheduled_batched_migration
+      }
+
+      migration.after -> {
+        expect(batched_migration).to have_scheduled_batched_migration(
+          table_name: :p_ci_pipeline_variables,
+          column_name: :pipeline_id,
+          interval: described_class::DELAY_INTERVAL,
+          batch_size: described_class::BATCH_SIZE,
+          sub_batch_size: described_class::SUB_BATCH_SIZE,
+          gitlab_schema: :gitlab_ci
+        )
+      }
+    end
+  end
+end
@@ -792,21 +792,6 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont
         .and change(repository, :status_updated_at).from(nil).to(Time.zone.now)
         .and change(repository, :next_delete_attempt_at).to(nil)
     end
-
-    context 'when the feature set_delete_failed_container_repository is disabled' do
-      before do
-        stub_feature_flags(set_delete_failed_container_repository: false)
-      end
-
-      it 'updates deletion status attributes', :freeze_time do
-        expect { subject }.to change(repository, :status).from(nil).to('delete_ongoing')
-          .and change(repository, :delete_started_at).from(nil).to(Time.zone.now)
-          .and change(repository, :status_updated_at).from(nil).to(Time.zone.now)
-          .and not_change(repository, :next_delete_attempt_at)
-
-        expect(repository.updated_at).to eq(Time.zone.now)
-      end
-    end
   end

   describe '#set_delete_scheduled_status', :freeze_time do
@@ -842,21 +827,6 @@ RSpec.describe ContainerRepository, :aggregate_failures, feature_category: :cont

       expect(repository.status_updated_at).to eq(Time.zone.now)
     end
-
-    context 'when the feature set_delete_failed_container_repository is disabled' do
-      before do
-        stub_feature_flags(set_delete_failed_container_repository: false)
-      end
-
-      it 'updates delete attributes' do
-        expect { subject }.to change(repository, :status).from('delete_ongoing').to('delete_scheduled')
-          .and change(repository, :delete_started_at).to(nil)
-          .and not_change(repository, :failed_deletion_count)
-          .and not_change(repository, :next_delete_attempt_at)
-
-        expect(repository.status_updated_at).to eq(Time.zone.now)
-      end
-    end
   end
 end

@@ -58,27 +58,6 @@ RSpec.describe ContainerRegistry::DeleteContainerRepositoryWorker, :aggregate_fa
     end
   end

-  shared_examples 'setting the status to delete_scheduled regardless of failed_deletion_count' do
-    let(:set_status_method) { :set_delete_scheduled_status }
-    let(:status_after_execution) { 'delete_scheduled' }
-
-    context 'when the failed_deletion_count is less than the max' do
-      before do
-        container_repository.update!(failed_deletion_count: ContainerRepository::MAX_DELETION_FAILURES - 1)
-      end
-
-      it_behaves_like 'not deleting the repository and setting the correct status'
-    end
-
-    context 'when the failed_deletion_count has reached the max' do
-      before do
-        container_repository.update!(failed_deletion_count: ContainerRepository::MAX_DELETION_FAILURES)
-      end
-
-      it_behaves_like 'not deleting the repository and setting the correct status'
-    end
-  end
-
   it 'picks and destroys the next container repository for destruction' do
     expect_next_pending_destruction_container_repository do |repo|
       expect_logs_on(repo, tags_size_before_delete: 100, deleted_tags_size: 0)
@@ -108,28 +87,12 @@ RSpec.describe ContainerRegistry::DeleteContainerRepositoryWorker, :aggregate_fa
       let(:cleanup_tags_service_response) { { status: :error, original_size: 100, deleted_size: 0 } }

       it_behaves_like 'setting the correct status based on failed_deletion_count'
-
-      context 'when the feature set_delete_failed_container_repository is disabled' do
-        before do
-          stub_feature_flags(set_delete_failed_container_repository: false)
-        end
-
-        it_behaves_like 'setting the status to delete_scheduled regardless of failed_deletion_count'
-      end
     end

     context 'with tags left to destroy' do
       let(:tags_count) { 10 }

       it_behaves_like 'setting the correct status based on failed_deletion_count'
-
-      context 'when the feature set_delete_failed_container_repository is disabled' do
-        before do
-          stub_feature_flags(set_delete_failed_container_repository: false)
-        end
-
-        it_behaves_like 'setting the status to delete_scheduled regardless of failed_deletion_count'
-      end
     end
   end

@@ -153,14 +116,6 @@ RSpec.describe ContainerRegistry::DeleteContainerRepositoryWorker, :aggregate_fa
      end

      it_behaves_like 'setting the correct status based on failed_deletion_count'
-
-      context 'when the feature set_delete_failed_container_repository is disabled' do
-        before do
-          stub_feature_flags(set_delete_failed_container_repository: false)
-        end
-
-        it_behaves_like 'setting the status to delete_scheduled regardless of failed_deletion_count'
-      end
     end

     context 'with no tags on the container repository' do