Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-09-16 21:07:37 +00:00
parent f7c8ef61c9
commit 6b73cf95a8
42 changed files with 363 additions and 345 deletions

View File

@ -1,3 +1,8 @@
/**
* These hljs css overwrite styling from 'highlight.js/lib/core'
* Do not remove unless removing the library
**/
.code.highlight {
.hljs-comment {
color: var(--color-hljs-comment);

View File

@ -12,12 +12,15 @@ module Ci
alias_attribute :runner_manager_id, :runner_machine_id
before_validation :ensure_project_id, on: :create
belongs_to :build, inverse_of: :runner_manager_build, class_name: 'Ci::Build'
belongs_to :runner_manager, foreign_key: :runner_machine_id, inverse_of: :runner_manager_builds,
class_name: 'Ci::RunnerManager'
validates :build, presence: true
validates :runner_manager, presence: true
validates :project_id, presence: true, on: :create
scope :for_build, ->(build_id) { where(build_id: build_id) }
@ -26,5 +29,11 @@ module Ci
.pluck(:build_id, :runner_manager_id)
.to_h
end
private
def ensure_project_id
self.project_id ||= build&.project_id
end
end
end

View File

@ -17,7 +17,7 @@ module Groups
@group.name ||= @group.path.dup
create_chat_team
Namespace.with_disabled_organization_validation { create_group }
create_group
return error_response unless @group.persisted?

View File

@ -39,7 +39,7 @@ module Groups
group.assign_attributes(params)
begin
success = Namespace.with_disabled_organization_validation { group.save }
success = group.save
after_update if success

View File

@ -13,9 +13,7 @@ module Users
user = build_class.new(current_user, params).execute
reset_token = user.generate_reset_token if user.recently_sent_password_reset?
Namespace.with_disabled_organization_validation do
after_create_hook(user, reset_token) if user.save
end
after_create_hook(user, reset_token) if user.save
user
end

View File

@ -0,0 +1,32 @@
- title: 'Replace namespace `add_on_purchase` GraphQL field with `add_on_purchases`'
  removal_milestone: '18.0'
  announcement_milestone: '17.5'
  breaking_change: true
  reporter: lwanko
  stage:
  issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/489850
  impact: low
  scope: # Can be one or a combination of: [instance, group, project]
  resolution_role: Developer
  manual_task: true
  body: | # (required) Don't change this line.
    The namespace GraphQL field `add_on_purchase` will be deprecated in GitLab 17.5 and removed in GitLab 18.0. Use the root `add_on_purchases` field instead.
  # ==============================
  # OPTIONAL END-OF-SUPPORT FIELDS
  # ==============================
  #
  # If an End of Support period applies:
  # 1) Share this announcement in the `#spt_managers` Support channel in Slack
  # 2) Mention `@gitlab-com/support` in this merge request.
  #
  # When support for this feature ends, in XX.YY milestone format.
  end_of_support_milestone:
  # Array of tiers the feature is currently available to,
  # like [Free, Silver, Gold, Core, Premium, Ultimate]
  tiers:
  # Links to documentation and thumbnail image
  documentation_url: https://docs.gitlab.com/ee/api/graphql/reference/#namespace
  image_url:
  # Use the youtube thumbnail URL with the structure of https://img.youtube.com/vi/UNIQUEID/hqdefault.jpg
  video_url:

View File

@ -0,0 +1,9 @@
---
migration_job_name: BackfillPCiRunnerMachineBuildsProjectId
description: Backfills sharding key `p_ci_runner_machine_builds.project_id` from `p_ci_builds`.
feature_category: runner
introduced_by_url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/164386'
milestone: '17.4'
queued_migration_version: 20240829160537
finalize_after: '2024-10-15'
finalized_by: # version of the migration that finalized this BBM

View File

@ -4,7 +4,7 @@ classes:
feature_categories:
- fleet_visibility
description: Relationships between builds and runner managers
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/111476
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/111484
milestone: '15.9'
gitlab_schema: gitlab_ci
desired_sharding_key:
@ -17,3 +17,4 @@ desired_sharding_key:
sharding_key: project_id
belongs_to: build
foreign_key_name: fk_bb490f12fe_p
desired_sharding_key_migration_job_name: BackfillPCiRunnerMachineBuildsProjectId

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true

class AddProjectIdToPCiRunnerManagerBuilds < Gitlab::Database::Migration[2.2]
  milestone '17.4'

  def change
    add_column(:p_ci_runner_machine_builds, :project_id, :bigint)
  end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true

class IndexPCiRunnerMachineBuildsOnProjectId < Gitlab::Database::Migration[2.2]
  include Gitlab::Database::PartitioningMigrationHelpers

  milestone '17.4'
  disable_ddl_transaction!

  TABLE_NAME = :p_ci_runner_machine_builds
  PARTITIONED_INDEX_NAME = :index_p_ci_runner_machine_builds_on_project_id

  def up
    add_concurrent_partitioned_index(TABLE_NAME, :project_id, name: PARTITIONED_INDEX_NAME)
  end

  def down
    remove_concurrent_partitioned_index_by_name(TABLE_NAME, PARTITIONED_INDEX_NAME)
  end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true

class AddPCiRunnerMachineBuildsProjectIdNullConstraint < Gitlab::Database::Migration[2.2]
  milestone '17.4'
  disable_ddl_transaction!

  TABLE_NAME = :p_ci_runner_machine_builds
  COLUMN_NAME = :project_id
  CONSTRAINT_NAME = :check_149ee35c38

  def up
    add_not_null_constraint(TABLE_NAME, COLUMN_NAME, constraint_name: CONSTRAINT_NAME, validate: false)
  end

  def down
    remove_not_null_constraint(TABLE_NAME, COLUMN_NAME, constraint_name: CONSTRAINT_NAME)
  end
end

View File

@ -0,0 +1,30 @@
# frozen_string_literal: true

class QueueBackfillPCiRunnerMachineBuildsProjectId < Gitlab::Database::Migration[2.2]
  milestone '17.4'
  restrict_gitlab_migration gitlab_schema: :gitlab_ci

  MIGRATION = "BackfillPCiRunnerMachineBuildsProjectId"
  TABLE_NAME = :p_ci_runner_machine_builds
  BATCH_COLUMN = :build_id
  DELAY_INTERVAL = 2.minutes
  BATCH_SIZE = 25_000
  SUB_BATCH_SIZE = 150
  JOB_ARGS = %i[project_id p_ci_builds project_id build_id partition_id]

  def up
    queue_batched_background_migration(
      MIGRATION,
      TABLE_NAME,
      BATCH_COLUMN,
      *JOB_ARGS,
      job_interval: DELAY_INTERVAL,
      batch_size: BATCH_SIZE,
      sub_batch_size: SUB_BATCH_SIZE
    )
  end

  def down
    delete_batched_background_migration(MIGRATION, TABLE_NAME, BATCH_COLUMN, JOB_ARGS)
  end
end

View File

@ -0,0 +1 @@
2bca5d40580b4c3e1e28e7bbcc67d9abc6f77b24af1699a94d6c978adfb7665b

View File

@ -0,0 +1 @@
005f1ad70a51dda06bb3fd04721c7076260004495aec58fa65fe683ccb0651df

View File

@ -0,0 +1 @@
e09fcf4890486f3162d084cfdcb1d651bb8efbaf2bcec74f4fe74b810175e195

View File

@ -0,0 +1 @@
db6ca34705e10326b707e26adca13882d270b76fbdd60370048c205c0dcf4cb5

View File

@ -2615,7 +2615,8 @@ PARTITION BY LIST (partition_id);
CREATE TABLE p_ci_runner_machine_builds (
partition_id bigint NOT NULL,
build_id bigint NOT NULL,
runner_machine_id bigint NOT NULL
runner_machine_id bigint NOT NULL,
project_id bigint
)
PARTITION BY LIST (partition_id);
@ -23389,6 +23390,9 @@ ALTER TABLE ONLY chat_names
ALTER TABLE ONLY chat_teams
ADD CONSTRAINT chat_teams_pkey PRIMARY KEY (id);
ALTER TABLE p_ci_runner_machine_builds
ADD CONSTRAINT check_149ee35c38 CHECK ((project_id IS NOT NULL)) NOT VALID;
ALTER TABLE workspaces
ADD CONSTRAINT check_2a89035b04 CHECK ((personal_access_token_id IS NOT NULL)) NOT VALID;
@ -29409,6 +29413,8 @@ CREATE UNIQUE INDEX index_p_ci_job_annotations_on_partition_id_job_id_name ON ON
CREATE INDEX index_p_ci_job_annotations_on_project_id ON ONLY p_ci_job_annotations USING btree (project_id);
CREATE INDEX index_p_ci_runner_machine_builds_on_project_id ON ONLY p_ci_runner_machine_builds USING btree (project_id);
CREATE INDEX index_p_ci_runner_machine_builds_on_runner_machine_id ON ONLY p_ci_runner_machine_builds USING btree (runner_machine_id);
CREATE INDEX index_packages_build_infos_on_pipeline_id ON packages_build_infos USING btree (pipeline_id);

View File

@ -104,7 +104,8 @@ Backup and restore recreates the entire database, including the indexes.
1. In a [database console](../troubleshooting/postgresql.md#start-a-database-console), rebuild all indexes:
```shell
REINDEX DATABASE gitlabhq_production
SET statement_timeout = 0;
REINDEX DATABASE gitlabhq_production;
```
1. In all nodes, start GitLab.
@ -136,7 +137,8 @@ Backup and restore recreates the entire database, including the indexes.
[database console](../troubleshooting/postgresql.md#start-a-database-console), rebuild all indexes:
```shell
REINDEX DATABASE gitlabhq_production
SET statement_timeout = 0;
REINDEX DATABASE gitlabhq_production;
```
1. If the secondary sites receive traffic from users, then let the read-replica databases catch up
@ -164,7 +166,8 @@ different types of indexes were handled, see the blog post about
1. In a [database console](../troubleshooting/postgresql.md#start-a-database-console), reindex each affected index:
```shell
REINDEX INDEX <index name> CONCURRENTLY
SET statement_timeout = 0;
REINDEX INDEX <index name> CONCURRENTLY;
```
1. After reindexing bad indexes, the collation must be refreshed. To update the system catalog to
@ -204,7 +207,8 @@ different types of indexes were handled, see the blog post about
[database console](../troubleshooting/postgresql.md#start-a-database-console), reindex each affected index:
```shell
REINDEX INDEX <index name> CONCURRENTLY
SET statement_timeout = 0;
REINDEX INDEX <index name> CONCURRENTLY;
```
1. After reindexing bad indexes, the collation must be refreshed. To update the system catalog to

View File

@ -1,226 +1,11 @@
---
status: proposed
creation-date: "2023-02-08"
authors: [ "@mattkasa", "@jon_jenkins" ]
coach: "@DylanGriffith"
approvers: [ "@rogerwoo", "@alexives" ]
owning-stage: "~devops::data stores"
participating-stages: []
redirect_to: 'https://handbook.gitlab.com/handbook/engineering/architecture/design-documents/automated_query_analysis/'
remove_date: '2025-09-10'
---
# Automated Query Analysis
This document was moved to [another location](https://handbook.gitlab.com/handbook/engineering/architecture/design-documents/automated_query_analysis/).
## Problem Summary
Our overarching goal is to improve the reliability and throughput of the GitLab
database review process. The current process requires merge request authors to
manually provide query plans and raw SQL when introducing new queries or
updating existing queries. This is both time-consuming and error-prone.
We believe we can improve operational efficiency by automatically identifying
and analyzing newly introduced SQL queries. This will reduce the risk of human
error, leading to improved system stability and an overall reduction in
performance regressions.
Our key success metric is a reduction in the number of manual actions required
by both code contributors and database reviewers, while maintaining a consistent
standard for database-related code contributions.
## Goals
1. Replace the current process of the author manually obtaining SQL and query
plans with an automated process.
1. Decrease the incidence of performance regressions when poorly performing
queries are missed by a manual process.
1. Increase contributor and reviewer efficiency by automating the query testing
portion of database review.
## Challenges
- Capturing the sheer number of SQL queries generated by an application the size of
`gitlab-org/gitlab` without increasing CI time and/or resource usage
may present a challenge.
- Storing that many SQL queries for an application the size of
`gitlab-org/gitlab` may consume large amounts of database storage.
## Opportunity
- Automated test suites, such as `rspec` suites, already generate a large
number of SQL queries that can be captured and used to perform
automated analysis.
- We already utilize `postgres.ai` to analyze query performance, and it has an
API that will allow us to automate the creation of database clones with
realistic production-like data in order to perform automated analysis.
- For customers who do not use something like `postgres.ai`, but who connect
to a test database in CI, we would use that connection to generate
query plans. The accuracy of these query plans will be affected by how
realistic the test data is, and can be improved by seeding the test database
with production-like data.
- By storing queries and their query plans, we can tokenize the query plan into
plan components, assign a cost and weight, then match those against a machine
learning model. We can build this model by generating query plans for queries
in our slow query logs, and assign actual cost and weight to their plan
components. This will allow us to leverage our corpus of queries and slow
query logs to predict the performance of arbitrary query text for other
applications and our customers.
## Proposal
We plan to automate the process of identifying new and changed database queries,
so that contributors and reviewers can more accurately and efficiently assess
the database performance impact of a code change.
We will capture queries generated as a side effect of running tests in CI,
normalize them, deduplicate them, analyze them using one or more analyzers, and
store them with their analyses and other metadata for future retrieval and
comparison.
We will post a comment to the originating merge request, containing a summary of
the new and changed queries, with links to their analyses, and highlighting any
queries that exceed established timing or other performance guidelines.
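
As a hedged illustration of the normalization and deduplication step, here is a
minimal sketch using the `pg_query` gem; this is an assumption for illustration,
not necessarily the mechanism this blueprint will adopt:

```ruby
# A minimal sketch, assuming the pg_query gem; not necessarily the
# mechanism this blueprint will adopt.
require 'pg_query'

captured = [
  'SELECT * FROM users WHERE id = 1',
  'SELECT * FROM users WHERE id = 2'
]

# Queries that differ only in constant values share a fingerprint, so grouping
# by fingerprint deduplicates them; normalize replaces constants with $n.
unique_queries = captured
  .group_by { |sql| PgQuery.fingerprint(sql) }
  .transform_values { |group| PgQuery.normalize(group.first) }

unique_queries.each_value { |sql| puts sql }
# => SELECT * FROM users WHERE id = $1
```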
## Design and implementation details
### Iteration 1
In the first iteration we will focus on how we capture queries, including
normalization, deduplication, and storage. We must consider the performance and
resource impacts on CI pipelines during capture, and include things like
partitioning and time decay for the information we are storing.
#### Capturing queries
We will strive to limit the time and resource impacts on our CI pipelines as
much as possible. These are some of the options we will consider for capturing
queries:
- **Instrumenting `ActiveRecord` in `ruby`**
- Challenges:
- Only applies to `ruby` projects, so it would not be applicable to projects
like `container-registry`.
- Has a non-zero impact on time and resources in CI pipelines (these impacts
can be observed in
[!111638](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/111638))
- Opportunities:
- Simple and straightforward to implement.
- Allows access to more information (for example, stack trace and calling locations).
- **Connection proxy with logging**
- Challenges:
- Adds complexity and possible performance overhead.
- Requires maintaining the code for the proxy.
- Opportunities:
- Allows us to customize the capture.
- Allows us to perform normalization/deduplication at capture time.
- **Built-in logging in `postgresql`**
- Challenges:
- Requires adding a configuration to enable logging.
- May be difficult to obtain the resulting logs.
- Opportunities:
- Doesn't require maintaining any code.
- Lightweight in terms of performance impact.
- **Capture from `pg_stat_statements`**
- Challenges:
- Requires creating the extension in the test database.
- Requires adding a configuration to set `pg_stat_statements.max` to a value
high enough to capture all queries.
- Consumes shared memory proportional to `pg_stat_statements.max`.
- Opportunities:
- Requires minimal code.
- Simple to obtain the data.
- Data is already normalized.
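
As a hedged sketch of the last option, capture via `pg_stat_statements` could
look roughly like this, assuming the extension is preloaded through
`shared_preload_libraries` in the test database:

```sql
-- A rough sketch of the pg_stat_statements option; assumes the extension is
-- preloaded via shared_preload_libraries in the test database.
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;

-- After the test suite has run, dump the already-normalized query texts.
SELECT queryid, calls, query
FROM pg_stat_statements
ORDER BY calls DESC;
```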
We have already built a proof of concept for instrumenting `ActiveRecord` in
`ruby` in
[!111638](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/111638), so as a
first step we will benchmark the other capture methods against it and select the
best option.
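
For reference, the instrumentation approach reduces to subscribing to
`ActiveRecord` query notifications. A minimal sketch, assuming a plain
`ActiveSupport::Notifications` subscriber (the actual implementation in
[!111638](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/111638) may
differ):

```ruby
# A minimal sketch of capturing queries via ActiveRecord instrumentation;
# the proof of concept in !111638 may differ.
require 'active_support/notifications'

CAPTURED_QUERIES = []

ActiveSupport::Notifications.subscribe('sql.active_record') do |_name, _start, _finish, _id, payload|
  # Skip schema reflection queries and keep application-generated SQL.
  CAPTURED_QUERIES << payload[:sql] unless payload[:name] == 'SCHEMA'
end
```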
#### Storing queries
For the next step of the first iteration we will use the proof of concept in
[!111638](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/111638) as well
as any data gathered from testing other capture methods to estimate the number
of rows per project, and use the pipeline execution statistics for
`gitlab-org/gitlab` to estimate throughput. These estimates will allow us to
evaluate storage mechanisms that are suitable for our purpose.
Some of the storage mechanisms we plan to evaluate are:
- **In the `ci` database in the GitLab database instance**
- Challenges:
- Places additional strain on this resource for `GitLab.com`.
- Opportunities:
- Allows us to utilize existing authentication and access control in the form of `CI_JOB_TOKEN`.
- Allows us to leverage associations with `ci_builds` and `ci_pipelines`.
- Simplifies deployment for self-managed.
- **In a new decomposed database in the GitLab database instance**
- Challenges:
- Adds to required development and testing effort.
- Adds to deployment effort for `GitLab.com`.
- Opportunities:
- Isolates database performance impacts from the existing `main` and `ci` database instances.
- **In a new external service**
- Challenges:
- Adds to required development and testing effort.
- Adds to deployment effort for `GitLab.com` and for self-managed.
- Opportunities:
- Isolates performance impacts from `gitlab-org/gitlab`.
- Allows us to iterate faster without impacting the main application.
- **In ClickHouse**
- Challenges:
- Not yet available for self-managed.
- Opportunities:
- Isolates database performance impacts from the existing `main` and `ci` database instances.
An example database schema for storing queries:
```sql
CREATE TABLE queries (
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
id bigint NOT NULL,
project_id bigint NOT NULL,
analysis_id bigint,
hash text,
sql text
);
CREATE TABLE pipeline_queries (
id bigint NOT NULL,
project_id bigint NOT NULL,
pipeline_id bigint NOT NULL,
query_id bigint NOT NULL
);
CREATE TABLE analyses (
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
id bigint NOT NULL,
project_id bigint NOT NULL,
query_id bigint NOT NULL,
buffers int,
walltime int,
explain text,
analysis_url text
);
```
One possible method of partitioning a schema like the above example would be to
utilize
[sub-partitioning](https://github.com/pgpartman/pg_partman/blob/master/doc/pg_partman.md#sub-partitioning).
If we partition by `project_id` then by some interval of `updated_at`, and touch
the row when we see a query, we can store only queries that the codebase is
still executing, and prune partitions that only contain queries the code is no
longer generating.
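
A hypothetical sketch of that layout, assuming pg_partman 4.x's
`create_parent`/`create_sub_parent` API and that `queries` was declared with
`PARTITION BY RANGE (project_id)` (function signatures differ in pg_partman 5.x):

```sql
-- Hypothetical sketch: partition queries by ranges of project_id...
SELECT partman.create_parent('public.queries', 'project_id', 'native', '100000');

-- ...then sub-partition each project_id range by month of updated_at, so
-- partitions holding only queries the code no longer generates can be pruned.
SELECT partman.create_sub_parent('public.queries', 'updated_at', 'native', 'monthly');
```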
### Iteration 2
In the second iteration we plan to identify new and changed queries, and post MR
comments containing a summary. We will begin soliciting feedback on the accuracy
and utility of the information, and improve or filter it to maximize its
usefulness.
### Iteration 3+
In the third and following iterations we plan to automate query analysis using
one or more analyzers, store these analyses, and add them to the MR comments. We
also intend to re-evaluate our use of the database to store query information,
and the API to retrieve it, and potentially move this to an external service.
<!-- This redirect file can be deleted after <2025-09-10>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View File

@ -638,6 +638,22 @@ A new `maxretries` parameter has been added to control how many times an event w
<div class="deprecation breaking-change" data-milestone="18.0">
### Replace namespace `add_on_purchase` GraphQL field with `add_on_purchases`
<div class="deprecation-notes">
- Announced in GitLab <span class="milestone">17.5</span>
- Removal in GitLab <span class="milestone">18.0</span> ([breaking change](https://docs.gitlab.com/ee/update/terminology.html#breaking-change))
- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/489850).
</div>
The namespace GraphQL field `add_on_purchase` will be deprecated in GitLab 17.5 and removed in GitLab 18.0. Use the root `add_on_purchases` field instead.
</div>
<div class="deprecation breaking-change" data-milestone="18.0">
### Runner `active` GraphQL fields replaced by `paused`
<div class="deprecation-notes">

View File

@ -859,6 +859,7 @@ The following variables configure the behavior of specific dependency scanning a
| `MAVEN_CLI_OPTS` | `gemnasium-maven` | `"-DskipTests --batch-mode"` | List of command line arguments that are passed to `maven` by the analyzer. See an example for [using private repositories](../index.md#using-private-maven-repositories). |
| `GRADLE_CLI_OPTS` | `gemnasium-maven` | | List of command line arguments that are passed to `gradle` by the analyzer. |
| `GRADLE_PLUGIN_INIT_PATH` | `gemnasium-maven` | `"gemnasium-init.gradle"` | Specifies the path to the Gradle initialization script. The init script must include `allprojects { apply plugin: 'project-report' }` to ensure compatibility. |
| `DS_GRADLE_RESOLUTION_POLICY` | `gemnasium-maven` | `"failed"` | Controls Gradle dependency resolution strictness. Accepts `"none"` to allow partial results, or `"failed"` to fail the scan when any dependencies fail to resolve. |
| `SBT_CLI_OPTS` | `gemnasium-maven` | | List of command-line arguments that the analyzer passes to `sbt`. |
| `PIP_INDEX_URL` | `gemnasium-python` | `https://pypi.org/simple` | Base URL of Python Package Index. |
| `PIP_EXTRA_INDEX_URL` | `gemnasium-python` | | Array of [extra URLs](https://pip.pypa.io/en/stable/reference/pip_install/#cmdoption-extra-index-url) of package indexes to use in addition to `PIP_INDEX_URL`. Comma-separated. **Warning:** Read [the following security consideration](#python-projects) when using this environment variable. |

View File

@ -187,14 +187,23 @@ gemnasium-python-dependency_scanning:
This error can occur when the automatically generated `CI_JOB_TOKEN` starts with a hyphen (`-`).
To avoid this error, follow [Poetry's configuration advice](https://python-poetry.org/docs/repositories/#configuring-credentials).
## Error: Project has `<number>` unresolved dependencies
## Error: project has unresolved dependencies
The following error messages indicate a Gradle dependency resolution issue
caused by your `build.gradle` or `build.gradle.kts` file:
- `Project has <number> unresolved dependencies` (GitLab 16.7 to 16.9)
- `project has unresolved dependencies: ["dependency_name:version"]` (GitLab 17.0 and later)
The error message `Project has <number> unresolved dependencies` indicates a dependency resolution problem caused by your `gradle.build` or `gradle.build.kts` file.
In GitLab 16.7 to 16.9, `gemnasium-maven` cannot continue processing when an unresolved dependency is encountered.
Consult the [Gradle dependency resolution documentation](https://docs.gradle.org/current/userguide/dependency_resolution.html)
for details on how to fix your `gradle.build` file.
More details can be found in [epic 12361](https://gitlab.com/groups/gitlab-org/-/epics/12361)
and [issue 437278](https://gitlab.com/gitlab-org/gitlab/-/issues/437278).
In GitLab 17.0 and later, `gemnasium-maven` supports the `DS_GRADLE_RESOLUTION_POLICY` environment variable which you can use to control how unresolved dependencies are handled. By default, the scan fails when unresolved dependencies are encountered. However, you can set the environment variable `DS_GRADLE_RESOLUTION_POLICY` to `"none"` to allow the scan to continue and produce partial results.
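
For example, a minimal `.gitlab-ci.yml` sketch that relaxes the resolution policy
(setting the variable globally is one option; it can also be set on the analyzer
job directly):

```yaml
# A minimal sketch: allow the scan to continue with partial results instead
# of failing on unresolved Gradle dependencies.
include:
  - template: Security/Dependency-Scanning.gitlab-ci.yml

variables:
  DS_GRADLE_RESOLUTION_POLICY: "none"
```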
Consult the [Gradle dependency resolution documentation](https://docs.gradle.org/current/userguide/dependency_resolution.html) for guidance on
fixing your `build.gradle` file. For more details, refer to [issue 482650](https://gitlab.com/gitlab-org/gitlab/-/issues/482650).
Additionally, there is a known issue in `Kotlin 2.0.0` affecting dependency resolution, which is scheduled to be fixed in `Kotlin 2.0.20`.
For more information, refer to [this issue](https://github.com/gradle/github-dependency-graph-gradle-plugin/issues/140#issuecomment-2230255380).
## Setting build constraints when scanning Go projects

View File

@ -123,6 +123,9 @@ To resolve the vulnerability:
1. Select outside the filter field. The vulnerability severity totals and list of matching vulnerabilities are updated.
1. Select the SAST vulnerability you want resolved.
1. In the upper-right corner, select **Resolve with AI**.
1. Add another commit to the merge request. This forces a new pipeline to run.
1. After the pipeline is complete, on the [pipeline security tab](../vulnerability_report/pipeline.md#view-vulnerabilities-in-a-pipeline), confirm that the vulnerability no longer appears.
1. On the vulnerability report, [manually update the vulnerability](../vulnerability_report/index.md#change-status-of-vulnerabilities).
A merge request containing the AI remediation suggestions is opened. Review the suggested changes,
then process the merge request according to your standard workflow.

View File

@ -232,6 +232,15 @@ refreshed.
![Project Vulnerability Report](img/project_security_dashboard_status_change_v16_0.png)
### Update a resolved vulnerability
When a vulnerability is resolved by a merge request, it still appears in the vulnerability report.
To update the vulnerability:
1. Confirm that the resolved vulnerability has the status **no longer detected**. The status of a vulnerability is displayed on the [activity filter](../vulnerability_report/index.md#activity-filter) and the vulnerability record.
1. Change the status of the vulnerability to **Resolved**.
## Sort vulnerabilities by date detected
By default, vulnerabilities are sorted by severity level, with the highest-severity vulnerabilities listed at the top.

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    class BackfillPCiRunnerMachineBuildsProjectId < BackfillDesiredShardingKeyPartitionJob
      operation_name :backfill_p_ci_runner_machine_builds_project_id
      feature_category :fleet_visibility
    end
  end
end

View File

@ -84,7 +84,7 @@ module Gitlab
name: _('Container Scanning For Registry'),
description: _('Run container scanning job whenever a container image with the latest tag is pushed.'),
help_path: Gitlab::Routing.url_helpers.help_page_path(
'user/application_security/continuous_vulnerability_scanning/index'),
'user/application_security/container_scanning/index', anchor: 'container-scanning-for-registry'),
type: 'container_scanning_for_registry'
},
pre_receive_secret_detection: {

View File

@ -73,8 +73,8 @@
"@gitlab/favicon-overlay": "2.0.0",
"@gitlab/fonts": "^1.3.0",
"@gitlab/query-language": "^0.0.5-a-20240903",
"@gitlab/ui": "91.13.0",
"@gitlab/svgs": "3.116.0",
"@gitlab/ui": "92.2.0",
"@gitlab/web-ide": "^0.0.1-dev-20240909013227",
"@mattiasbuelens/web-streams-adapter": "^0.1.0",
"@rails/actioncable": "7.0.8-4",

View File

@ -2,11 +2,11 @@
require 'spec_helper'
RSpec.describe GroupsController, factory_default: :keep, feature_category: :code_review_workflow do
RSpec.describe GroupsController, :with_current_organization, factory_default: :keep, feature_category: :code_review_workflow do
include ExternalAuthorizationServiceHelpers
include AdminModeHelper
let_it_be(:group_organization) { create(:organization) }
let_it_be(:group_organization) { current_organization }
let_it_be_with_refind(:group) { create_default(:group, :public, organization: group_organization) }
let_it_be_with_refind(:project) { create(:project, namespace: group) }
let_it_be(:user) { create(:user) }
@ -18,6 +18,10 @@ RSpec.describe GroupsController, factory_default: :keep, feature_category: :code
let_it_be(:developer) { group.add_developer(create(:user)).user }
let_it_be(:guest) { group.add_guest(create(:user)).user }
before_all do
group_organization.users = User.all
end
before do
enable_admin_mode!(admin_with_admin_mode)
end
@ -240,7 +244,7 @@ RSpec.describe GroupsController, factory_default: :keep, feature_category: :code
context 'authorization' do
it 'allows an admin to create a group' do
sign_in(create(:admin))
sign_in(admin_without_admin_mode)
expect do
post :create, params: { group: { name: 'new_group', path: 'new_group' } }
@ -301,10 +305,9 @@ RSpec.describe GroupsController, factory_default: :keep, feature_category: :code
end
end
context 'when creating a top level group', :with_current_organization do
context 'when creating a top level group' do
before do
sign_in(developer)
Current.organization.users << developer
end
context 'and can_create_group is enabled' do

View File

@ -89,6 +89,7 @@ RSpec.describe 'Database schema', feature_category: :database do
ci_pipeline_variables: %w[partition_id pipeline_id project_id],
ci_pipelines: %w[partition_id auto_canceled_by_partition_id],
p_ci_pipelines: %w[partition_id auto_canceled_by_partition_id auto_canceled_by_id],
p_ci_runner_machine_builds: %w[project_id],
ci_runner_projects: %w[runner_id],
ci_sources_pipelines: %w[partition_id source_partition_id source_job_id],
ci_sources_projects: %w[partition_id],

View File

@ -2,10 +2,14 @@
require 'spec_helper'
RSpec.describe 'Dashboard Group', :js, feature_category: :groups_and_projects do
RSpec.describe 'Dashboard Group', :with_current_organization, :js, feature_category: :groups_and_projects do
let(:user) { create(:user) }
let(:group) { create(:group) }
before do
current_organization.users << user
end
context 'when user has no groups' do
before do
sign_in(user)

View File

@ -2,10 +2,10 @@
require 'spec_helper'
RSpec.describe 'Upload a group export archive', :api, :js, feature_category: :groups_and_projects do
RSpec.describe 'Upload a group export archive', :with_current_organization, :api, :js, feature_category: :groups_and_projects do
include_context 'file upload requests helpers'
let_it_be(:user) { create(:user, :admin) }
let_it_be(:user) { create(:user, :admin, organizations: [current_organization]) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let(:api_path) { '/groups/import' }

View File

@ -52,7 +52,7 @@ RSpec.describe Gitlab::Auth::Ldap::User do
describe '#valid_sign_in?' do
before do
Namespace.with_disabled_organization_validation { gl_user.save! }
gl_user.save!
end
it 'returns true' do

View File

@ -31,10 +31,6 @@ RSpec.describe Gitlab::Auth::OAuth::User, aggregate_failures: true, feature_cate
let(:ldap_user) { Gitlab::Auth::Ldap::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
let(:ldap_user_2) { Gitlab::Auth::Ldap::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
around do |example|
Namespace.with_disabled_organization_validation { example.run }
end
describe '.find_by_uid_and_provider' do
let(:provider) { 'provider' }
let(:uid) { 'uid' }

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillPCiRunnerMachineBuildsProjectId,
  migration: :gitlab_ci, feature_category: :fleet_visibility do
  include_examples 'desired sharding key backfill job' do
    let(:batch_table) { :p_ci_runner_machine_builds }
    let(:batch_column) { :build_id }
    let(:backfill_column) { :project_id }
    let(:backfill_via_table) { :p_ci_builds }
    let(:backfill_via_column) { :project_id }
    let(:backfill_via_foreign_key) { :build_id }
    let(:partition_column) { :partition_id }
  end
end

View File

@ -0,0 +1,55 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillPCiRunnerMachineBuildsProjectId, migration: :gitlab_ci, feature_category: :fleet_visibility do
  let!(:batched_migration) { described_class::MIGRATION }
  let(:expected_job_args) { %i[project_id p_ci_builds project_id build_id partition_id] }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: described_class::TABLE_NAME,
          column_name: :build_id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_ci,
          job_arguments: expected_job_args
        )
      }
    end
  end

  context 'when executed on .com' do
    before do
      allow(Gitlab).to receive(:com_except_jh?).and_return(true)
    end

    it 'schedules a new batched migration' do
      reversible_migration do |migration|
        migration.before -> {
          expect(batched_migration).not_to have_scheduled_batched_migration
        }

        migration.after -> {
          expect(batched_migration).to have_scheduled_batched_migration(
            table_name: described_class::TABLE_NAME,
            column_name: :build_id,
            interval: described_class::DELAY_INTERVAL,
            batch_size: described_class::BATCH_SIZE,
            sub_batch_size: described_class::SUB_BATCH_SIZE,
            gitlab_schema: :gitlab_ci,
            job_arguments: expected_job_args
          )
        }
      end
    end
  end
end

View File

@ -58,7 +58,7 @@ RSpec.describe Ci::RunnerManagerBuild, model: true, feature_category: :fleet_vis
it { expect(partitioning_strategy.active_partition).to be_present }
end
context 'loose foreign key on p_ci_runner_manager_builds.runner_manager_id' do # rubocop:disable RSpec/ContextWording
context 'with loose foreign key on p_ci_runner_manager_builds.runner_manager_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let!(:parent) { create(:ci_runner_machine) }
let!(:model) { create(:ci_runner_machine_build, runner_manager: parent) }
@ -81,7 +81,7 @@ RSpec.describe Ci::RunnerManagerBuild, model: true, feature_category: :fleet_vis
it { is_expected.to eq(described_class.where(build_id: build_id)) }
end
context 'with non-existeng build_id' do
context 'with non-existing build_id' do
let(:build_id) { non_existing_record_id }
it { is_expected.to be_empty }
@ -103,4 +103,23 @@ RSpec.describe Ci::RunnerManagerBuild, model: true, feature_category: :fleet_vis
it { is_expected.to be_empty }
end
end
describe '#ensure_project_id' do
it 'sets the project_id before validation' do
runner_machine_build = FactoryBot.build(:ci_runner_machine_build, build: build)
expect do
runner_machine_build.validate!
end.to change { runner_machine_build.project_id }.from(nil).to(runner_machine_build.build.project.id)
end
it 'does not override the project_id if set' do
another_project = create(:project)
runner_machine_build = FactoryBot.build(:ci_runner_machine_build, project_id: another_project.id)
expect do
runner_machine_build.validate!
end.not_to change { runner_machine_build.project_id }.from(another_project.id)
end
end
end

View File

@ -130,9 +130,10 @@ RSpec.describe PipelineSerializer, feature_category: :continuous_integration do
it 'preloads related merge requests' do
recorded = ActiveRecord::QueryRecorder.new { subject }
expected_query = "FROM \"merge_requests\" WHERE \"merge_requests\".\"id\" IN (#{merge_request_1.id}, #{merge_request_2.id})"
id_pattern = /#{merge_request_1.id}, #{merge_request_2.id}|#{merge_request_2.id}, #{merge_request_1.id}/
expected_query = /FROM "merge_requests" WHERE "merge_requests"\."id" IN \(#{id_pattern}\)/
expect(recorded.log).to include(a_string_including(expected_query))
expect(recorded.log).to include(a_string_matching(expected_query))
end
end

View File

@ -4,8 +4,13 @@ require 'spec_helper'
RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_projects do
let_it_be(:user, reload: true) { create(:user) }
let_it_be(:organization) { create(:organization, users: [user]) }
let(:current_user) { user }
let(:group_params) { { path: 'group_path', visibility_level: Gitlab::VisibilityLevel::PUBLIC }.merge(extra_params) }
let(:group_params) do
{ path: 'group_path', visibility_level: Gitlab::VisibilityLevel::PUBLIC,
organization_id: organization.id }.merge(extra_params)
end
let(:extra_params) { {} }
let(:created_group) { response[:group] }
@ -220,11 +225,11 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
end
end
context 'when organization is not set by params', :with_current_organization do
context 'when organization is not set by params' do
context 'and the parent of the group has an organization' do
let_it_be(:parent_group) { create(:group, organization: other_organization) }
let(:extra_params) { { parent_id: parent_group.id } }
let(:group_params) { { path: 'with-parent', parent_id: parent_group.id } }
it 'creates group with the parent group organization' do
expect(created_group.organization).to eq(other_organization)
@ -232,26 +237,18 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
end
end
context 'when organization_id is set to nil' do
context 'when organization_id is not specified' do
let_it_be(:default_organization) { create(:organization, :default) }
let(:extra_params) { { organization_id: nil } }
let(:group_params) { { path: 'group_path' } }
it 'creates group in default organization' do
expect(created_group.organization).to eq(default_organization)
end
end
context 'when organization is not set at all' do
it 'creates group without an organization' do
expect(created_group.organization).to eq(nil)
# let db default happen even if the organization record itself doesn't exist
expect(created_group.organization_id).not_to be_nil
end
end
end
context 'for a subgroup' do
let_it_be(:group) { create(:group) }
let_it_be(:group) { create(:group, organization: organization) }
let(:extra_params) { { parent_id: group.id } }
context 'as group owner' do
@ -337,7 +334,7 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
context 'when there is a group-level exclusion' do
let(:extra_params) { { parent_id: group.id } }
let_it_be(:group) { create(:group) { |g| g.add_owner(user) } }
let_it_be(:group) { create(:group, organization: organization) { |g| g.add_owner(user) } }
let_it_be(:group_integration) do
create(:beyond_identity_integration, group: group, instance: false, active: false)
end
@ -363,7 +360,7 @@ RSpec.describe Groups::CreateService, '#execute', feature_category: :groups_and_
context 'with an active group-level integration' do
let(:extra_params) { { parent_id: group.id } }
let_it_be(:group) { create(:group) { |g| g.add_owner(user) } }
let_it_be(:group) { create(:group, organization: organization) { |g| g.add_owner(user) } }
let_it_be(:group_integration) do
create(:prometheus_integration, :group, group: group, api_url: 'https://prometheus.group.com/')
end

View File

@ -4,17 +4,19 @@ require 'spec_helper'
RSpec.describe Users::CreateService, feature_category: :user_management do
describe '#execute' do
let_it_be(:organization) { create(:organization) }
let(:password) { User.random_password }
let(:admin_user) { create(:admin) }
let(:email) { 'jd@example.com' }
let(:base_params) do
{ name: 'John Doe', username: 'jduser', email: email, password: password, organization_id: organization.id }
end
context 'with an admin user' do
let(:service) { described_class.new(admin_user, params) }
let(:email) { 'jd@example.com' }
context 'when required parameters are provided' do
let(:params) do
{ name: 'John Doe', username: 'jduser', email: email, password: password }
end
let(:params) { base_params }
it 'returns a persisted user' do
expect(service.execute).to be_persisted
@ -88,9 +90,7 @@ RSpec.describe Users::CreateService, feature_category: :user_management do
end
context 'when force_random_password parameter is true' do
let(:params) do
{ name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: password, force_random_password: true }
end
let(:params) { base_params.merge(force_random_password: true) }
it 'generates random password' do
user = service.execute
@ -101,15 +101,7 @@ RSpec.describe Users::CreateService, feature_category: :user_management do
end
context 'when password_automatically_set parameter is true' do
let(:params) do
{
name: 'John Doe',
username: 'jduser',
email: 'jd@example.com',
password: password,
password_automatically_set: true
}
end
let(:params) { base_params.merge(password_automatically_set: true) }
it 'persists the given attributes' do
user = service.execute
@ -127,9 +119,7 @@ RSpec.describe Users::CreateService, feature_category: :user_management do
end
context 'when skip_confirmation parameter is true' do
let(:params) do
{ name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: password, skip_confirmation: true }
end
let(:params) { base_params.merge(skip_confirmation: true) }
it 'confirms the user' do
expect(service.execute).to be_confirmed
@ -137,9 +127,7 @@ RSpec.describe Users::CreateService, feature_category: :user_management do
end
context 'when reset_password parameter is true' do
let(:params) do
{ name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: password, reset_password: true }
end
let(:params) { base_params.merge(reset_password: true) }
it 'resets password even if a password parameter is given' do
expect(service.execute).to be_recently_sent_password_reset
@ -158,9 +146,7 @@ RSpec.describe Users::CreateService, feature_category: :user_management do
end
context 'with nil user' do
let(:params) do
{ name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: password, skip_confirmation: true }
end
let(:params) { base_params.merge(skip_confirmation: true) }
let(:service) { described_class.new(nil, params) }

View File

@ -23,6 +23,4 @@
- spec/lib/gitlab/auth/saml/user_spec.rb
- spec/models/hooks/system_hook_spec.rb
- spec/requests/api/groups_spec.rb
- spec/services/users/create_service_spec.rb
- spec/services/groups/create_service_spec.rb
- spec/services/resource_access_tokens/create_service_spec.rb

View File

@ -32,18 +32,6 @@ RSpec.shared_examples 'desired sharding key backfill job' do
},
vulnerability_flags: {
vulnerability_occurrences: 'https://gitlab.com/gitlab-org/gitlab/-/issues/480354'
},
dast_site_validations: {
dast_site_tokens: 'https://gitlab.com/gitlab-org/gitlab/-/issues/474985'
},
dast_site_profile_secret_variables: {
dast_site_profiles: 'https://gitlab.com/gitlab-org/gitlab/-/issues/480014'
},
dast_site_profiles_builds: {
dast_site_profiles: 'https://gitlab.com/gitlab-org/gitlab/-/issues/480014'
},
dast_pre_scan_verifications: {
dast_profiles: 'https://gitlab.com/gitlab-org/gitlab/-/issues/480014'
}
}
end

View File

@ -1362,10 +1362,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.116.0.tgz#281b8c4f12843172f460028b6e624e57d65496a2"
integrity sha512-mbOu8mI/vDqGbo+HGyZ0a4F1g/TGRbRlxBqAXNtz42KX8P8rrLERxRjjV1P/vrLCIC744hhrZ8zSTCNGGcyQPA==
"@gitlab/ui@91.13.0":
version "91.13.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-91.13.0.tgz#d431f1151913141a9d1d27ecb9905a5f16966f29"
integrity sha512-+8g33OSzueqHUFyIrJposcL0JbcvvoYY19Qr5HFrG9Dvm3WBpNduAA4aZGNMPPJLYnR6MA2iMtk+p/svA/j6Rw==
"@gitlab/ui@92.2.0":
version "92.2.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-92.2.0.tgz#67f7a3b3ccc1973a9d72f0bedac2d9ae9e3a9830"
integrity sha512-urYwjd+ZjCFiLYjAGbgYH/4GcvxvMW+FFm2qUIhJ37RRMdwzQOUik+IexuFEt3d564X8/rltGEAw/uHa3dIIqw==
dependencies:
"@floating-ui/dom" "1.4.3"
echarts "^5.3.2"
@ -13104,16 +13104,7 @@ string-length@^4.0.1:
char-regex "^1.0.2"
strip-ansi "^6.0.0"
"string-width-cjs@npm:string-width@^4.2.0":
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
dependencies:
emoji-regex "^8.0.0"
is-fullwidth-code-point "^3.0.0"
strip-ansi "^6.0.1"
"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
"string-width-cjs@npm:string-width@^4.2.0", "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@ -13166,7 +13157,7 @@ string_decoder@^1.0.0, string_decoder@^1.1.1, string_decoder@~1.1.1:
dependencies:
safe-buffer "~5.1.0"
"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
@ -13180,13 +13171,6 @@ strip-ansi@^5.2.0:
dependencies:
ansi-regex "^4.1.0"
strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
dependencies:
ansi-regex "^5.0.1"
strip-ansi@^7.0.1, strip-ansi@^7.1.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45"
@ -14878,7 +14862,7 @@ worker-loader@^3.0.8:
loader-utils "^2.0.0"
schema-utils "^3.0.0"
"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0":
"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
@ -14896,15 +14880,6 @@ wrap-ansi@^6.2.0:
string-width "^4.1.0"
strip-ansi "^6.0.0"
wrap-ansi@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
dependencies:
ansi-styles "^4.0.0"
string-width "^4.1.0"
strip-ansi "^6.0.0"
wrap-ansi@^8.1.0:
version "8.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"