Add latest changes from gitlab-org/gitlab@master

parent c8eaa54d71
commit d2ce6b490c
@@ -453,7 +453,6 @@ Style/IfUnlessModifier:
- 'ee/lib/ee/gitlab/quick_actions/epic_actions.rb'
- 'ee/lib/ee/gitlab/quick_actions/issue_actions.rb'
- 'ee/lib/ee/gitlab/repository_size_checker.rb'
- 'ee/lib/ee/sidebars/groups/menus/settings_menu.rb'
- 'ee/lib/ee/sidebars/projects/menus/analytics_menu.rb'
- 'ee/lib/ee/sidebars/projects/menus/security_compliance_menu.rb'
- 'ee/lib/elastic/latest/custom_language_analyzers.rb'
@@ -1,7 +1,9 @@
import Wikis from '~/pages/shared/wikis/wikis';
import { mountApplications } from '~/pages/shared/wikis/edit';
import { mountMoreActions } from '~/pages/shared/wikis/more_actions';
import { mountWikiSidebarEntries } from '~/pages/shared/wikis/show';

mountWikiSidebarEntries();
mountApplications();
mountMoreActions();

@@ -71,7 +71,7 @@ const mountWikiContentApp = () => {
  });
};

const mountWikiSidebarEntries = () => {
export const mountWikiSidebarEntries = () => {
  const el = document.querySelector('#js-wiki-sidebar-entries');
  if (!el) return false;

@@ -93,7 +93,6 @@ const mountWikiSidebarEntries = () => {

export const mountApplications = () => {
  mountWikiContentApp();
  mountWikiSidebarEntries();

  new Wikis(); // eslint-disable-line no-new
};
@@ -30,7 +30,13 @@ class GraphqlController < ApplicationController
  protect_from_forgery with: :null_session, only: :execute

  # must come first: current_user is set up here
  before_action :authenticate_graphql, only: :execute
  before_action(only: [:execute]) do
    if Feature.enabled? :graphql_minimal_auth_methods # rubocop:disable Gitlab/FeatureFlagWithoutActor -- reverting MR
      authenticate_graphql
    else
      authenticate_sessionless_user!(:api)
    end
  end

  before_action :authorize_access_api!
  before_action :set_user_last_activity
@ -10,36 +10,13 @@ module Ci
|
|||
|
||||
free_resources = resource_group.resources.free.count
|
||||
|
||||
if free_resources == 0
|
||||
if resource_group.waiting_processables.any?
|
||||
# if the resource group is still 'tied up' in other processables,
|
||||
# and there are more upcoming processables
|
||||
# kick off the worker again for the current resource group
|
||||
respawn_assign_resource_worker(resource_group)
|
||||
end
|
||||
|
||||
return
|
||||
end
|
||||
return if free_resources == 0
|
||||
|
||||
enqueue_upcoming_processables(free_resources, resource_group)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def respawn_assign_resource_worker(resource_group)
|
||||
return if Feature.disabled?(:respawn_assign_resource_worker, project, type: :gitlab_com_derisk)
|
||||
|
||||
assign_resource_from_resource_group(resource_group)
|
||||
end
|
||||
|
||||
def assign_resource_from_resource_group(resource_group)
|
||||
if Feature.enabled?(:assign_resource_worker_deduplicate_until_executing, project)
|
||||
Ci::ResourceGroups::AssignResourceFromResourceGroupWorkerV2.perform_in(RESPAWN_WAIT_TIME, resource_group.id)
|
||||
else
|
||||
Ci::ResourceGroups::AssignResourceFromResourceGroupWorker.perform_in(RESPAWN_WAIT_TIME, resource_group.id)
|
||||
end
|
||||
end
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def enqueue_upcoming_processables(free_resources, resource_group)
|
||||
resource_group.upcoming_processables.take(free_resources).each do |upcoming|
|
||||
|
|
|
|||
|
|
@ -23,9 +23,7 @@ module Packages
|
|||
|
||||
package, package_file = ApplicationRecord.transaction { create_terraform_module_package! }
|
||||
|
||||
if Feature.enabled?(:index_terraform_module_archive, project)
|
||||
::Packages::TerraformModule::ProcessPackageFileWorker.perform_async(package_file.id)
|
||||
end
|
||||
::Packages::TerraformModule::ProcessPackageFileWorker.perform_async(package_file.id)
|
||||
|
||||
ServiceResponse.success(payload: { package: package })
|
||||
rescue ActiveRecord::RecordInvalid => e
|
||||
|
|
|
|||
|
|
@ -1,4 +1,6 @@
|
|||
- @content_class = "limit-container-width" unless fluid_layout
|
||||
- @gfm_form = true
|
||||
- @noteable_type = 'Wiki'
|
||||
- wiki_page_title @page, _('Changes')
|
||||
- add_page_specific_style 'page_bundles/wiki'
|
||||
- commit = @diffs.diffable
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
- @gfm_form = true
|
||||
- @noteable_type = 'Wiki'
|
||||
- @content_class = "limit-container-width" unless fluid_layout
|
||||
- page_title s_("WikiClone|Git Access"), _("Wiki")
|
||||
- add_page_specific_style 'page_bundles/wiki'
|
||||
|
|
|
|||
|
|
@ -1,4 +1,6 @@
|
|||
- @content_class = "limit-container-width" unless fluid_layout
|
||||
- @gfm_form = true
|
||||
- @noteable_type = 'Wiki'
|
||||
- wiki_page_title @page, _('History')
|
||||
- add_page_specific_style 'page_bundles/wiki'
|
||||
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
---
|
||||
name: respawn_assign_resource_worker
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/436988
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/147313
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/450793
|
||||
milestone: '16.11'
|
||||
group: group::environments
|
||||
name: graphql_minimal_auth_methods
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/438462
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150407
|
||||
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/444929
|
||||
milestone: '17.0'
|
||||
group: group::authentication
|
||||
type: gitlab_com_derisk
|
||||
default_enabled: false
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
---
|
||||
name: index_terraform_module_archive
|
||||
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/438058
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/145020
|
||||
rollout_issue_url:
|
||||
milestone: '16.10'
|
||||
group: group::package registry
|
||||
type: wip
|
||||
default_enabled: false
|
||||
|
|
@ -7,4 +7,21 @@ feature_categories:
|
|||
description: Stores rules of a Security Orchestration Policy.
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/109624
|
||||
milestone: '15.9'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
desired_sharding_key:
|
||||
project_id:
|
||||
references: projects
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: security_orchestration_policy_configuration_id
|
||||
table: security_orchestration_policy_configurations
|
||||
sharding_key: project_id
|
||||
belongs_to: security_orchestration_policy_configuration
|
||||
namespace_id:
|
||||
references: namespaces
|
||||
backfill_via:
|
||||
parent:
|
||||
foreign_key: security_orchestration_policy_configuration_id
|
||||
table: security_orchestration_policy_configurations
|
||||
sharding_key: namespace_id
|
||||
belongs_to: security_orchestration_policy_configuration
|
||||
|
|
|
|||
|
|
@ -4,7 +4,9 @@ classes:
|
|||
- SoftwareLicense
|
||||
feature_categories:
|
||||
- security_policy_management
|
||||
description: Normalized software licenses to use in conjunction with License Compliance features (like software license policies)
|
||||
description: Normalized software licenses to use in conjunction with License Compliance
|
||||
features (like software license policies)
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/6246
|
||||
milestone: '11.2'
|
||||
gitlab_schema: gitlab_main
|
||||
gitlab_schema: gitlab_main_cell
|
||||
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/441078
|
||||
|
|
|
|||
|
|
@ -0,0 +1,44 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class SetMissingTraversalIdsForVulnerabilityReads < Gitlab::Database::Migration[2.2]
|
||||
disable_ddl_transaction!
|
||||
milestone '17.1'
|
||||
|
||||
restrict_gitlab_migration gitlab_schema: :gitlab_main
|
||||
|
||||
class VulnerabilityRead < MigrationRecord
|
||||
include EachBatch
|
||||
|
||||
self.table_name = 'vulnerability_reads'
|
||||
end
|
||||
|
||||
def up
|
||||
return unless Gitlab.com?
|
||||
|
||||
VulnerabilityRead.where(traversal_ids: [], archived: false).each_batch(of: 50) do |batch|
|
||||
vulnerability_ids = batch.pluck(:vulnerability_id)
|
||||
|
||||
update_records(vulnerability_ids)
|
||||
end
|
||||
end
|
||||
|
||||
def down
|
||||
# no-op
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def update_records(vulnerability_ids)
|
||||
execute(<<~SQL)
|
||||
UPDATE
|
||||
vulnerability_reads
|
||||
SET
|
||||
traversal_ids = namespaces.traversal_ids
|
||||
FROM
|
||||
namespaces
|
||||
WHERE
|
||||
vulnerability_reads.vulnerability_id IN (#{vulnerability_ids.join(', ')}) AND
|
||||
namespaces.id = vulnerability_reads.namespace_id
|
||||
SQL
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
84fd9d37f90dc9d6d76e537ad67451f721d5c1e52de0366111efc173678743c8
|
||||
|
|
@ -10095,6 +10095,7 @@ Input type: `WorkspaceCreateInput`
|
|||
| <a id="mutationworkspacecreateeditor"></a>`editor` | [`String!`](#string) | Editor to inject into the created workspace. Must match a configured template. |
|
||||
| <a id="mutationworkspacecreatemaxhoursbeforetermination"></a>`maxHoursBeforeTermination` | [`Int!`](#int) | Maximum hours the workspace can exist before it is automatically terminated. |
|
||||
| <a id="mutationworkspacecreateprojectid"></a>`projectId` | [`ProjectID!`](#projectid) | ID of the project that will provide the Devfile for the created workspace. |
|
||||
| <a id="mutationworkspacecreatevariables"></a>`variables` | [`[WorkspaceVariableInput!]`](#workspacevariableinput) | Variables to inject into the workspace. |
|
||||
|
||||
#### Fields
|
||||
|
||||
|
|
@ -36082,6 +36083,14 @@ Type of a work item widget.
|
|||
| <a id="workitemwidgettypetime_tracking"></a>`TIME_TRACKING` | Time Tracking widget. |
|
||||
| <a id="workitemwidgettypeweight"></a>`WEIGHT` | Weight widget. |
|
||||
|
||||
### `WorkspaceVariableInputType`
|
||||
|
||||
Enum for the type of the variable to be injected in a workspace.
|
||||
|
||||
| Value | Description |
|
||||
| ----- | ----------- |
|
||||
| <a id="workspacevariableinputtypeenvironment"></a>`ENVIRONMENT` | Name type. |
|
||||
|
||||
## Scalar types
|
||||
|
||||
Scalar values are atomic values, and do not have fields of their own.
|
||||
|
|
@ -39036,3 +39045,15 @@ Attributes for value stream stage.
|
|||
| Name | Type | Description |
|
||||
| ---- | ---- | ----------- |
|
||||
| <a id="workitemwidgetweightinputweight"></a>`weight` | [`Int`](#int) | Weight of the work item. |
|
||||
|
||||
### `WorkspaceVariableInput`
|
||||
|
||||
Attributes for defining a variable to be injected in a workspace.
|
||||
|
||||
#### Arguments
|
||||
|
||||
| Name | Type | Description |
|
||||
| ---- | ---- | ----------- |
|
||||
| <a id="workspacevariableinputkey"></a>`key` | [`String!`](#string) | Key of the variable. |
|
||||
| <a id="workspacevariableinputtype"></a>`type` | [`WorkspaceVariableInputType!`](#workspacevariableinputtype) | Type of the variable to be injected in a workspace. |
|
||||
| <a id="workspacevariableinputvalue"></a>`value` | [`String!`](#string) | Value of the variable. |
|
||||
|
|
|
|||
|
|
@ -37,4 +37,4 @@ The decision was made to store the library in the same repository during the fir

With that said, we still followed [the process](../../../../development/gems.md#reserve-a-gem-name) to reserve the gem on [RubyGems.org](https://rubygems.org/gems/gitlab-secret_detection) to avoid name-squatters from taking over the name and providing malicious code to 3rd-parties.

We have no plans to publish the gem externally at least until [Phase 2](../index.md#phase-2---standalone-pre-receive-service) as we begin to consider building a standalone service to perform secret detection.
We have no plans to publish the gem externally at least until [Phase 2](../index.md#phase-2---standalone-secret-detection-service) as we begin to consider building a standalone service to perform secret detection.
@@ -0,0 +1,127 @@
---
owning-stage: "~devops::secure"
description: "GitLab Secret Detection ADR 004: Secret Detection Scanner Service"
---

# GitLab Secret Detection ADR 004: Secret Detection Scanner Service

## Context

In [phase 2](../index.md#phase-2---standalone-secret-detection-service) of Secret Push Protection, the goal is to have a
dedicated service responsible for running Secret Detection scans on the given input blobs. This is done primarily for
scalability. Regex operations in the Secret Detection scan [consume](https://gitlab.com/gitlab-org/gitlab/-/issues/422574#note_1582015771)
significant resources, so running scans inside Rails or Gitaly instances would reduce the resources available for
other operations. Running scans in isolation provides greater control over resource allocation and lets the service
scale independently as needed.

## Proposed Solution

We will build a standalone Secret Detection service responsible for running the Secret Detection scans.

The main change to the Secret Push Protection workflow is the delegation of scanning responsibility from the
[Secret Detection gem](https://gitlab.com/gitlab-org/gitlab/-/tree/master/gems/gitlab-secret_detection) to the RPC
service for GitLab SaaS, that is, the [secrets push check](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/gitlab/checks/secrets_check.rb) invokes the RPC
service with an array of blobs to scan for secrets. Note that the project eligibility checks are still performed on the
[Rails side](https://gitlab.com/gitlab-org/gitlab/-/blob/1a6db446abce0aa02f41d060511d7e085e3c7571/ee/lib/gitlab/checks/secrets_check.rb#L49-51).

### High-Level Architecture

The service architecture involves extracting the secret detection logic into a standalone service
which communicates directly with both the Rails application and Gitaly. This provides a means to scale
the secret detection nodes independently, and reduce resource usage overhead on the Rails application.

Scans still run synchronously as a (potentially) blocking pre-receive transaction. The blob size remains limited to 1MB.

Note that the node count is purely illustrative, but serves to emphasize the independent scaling requirements for the
scanning service.

```plantuml

@startuml Phase2
skinparam linetype ortho

card "**External Load Balancer**" as elb #6a9be7
card "**Internal Load Balancer**" as ilb #9370DB

together {
  collections "**GitLab Rails** x3" as gitlab #32CD32
  collections "**Sidekiq** x3" as sidekiq #ff8dd1
}

together {
  collections "**Consul** x3" as consul #e76a9b
}

card "SecretScanningService Cluster" as prsd_cluster {
  collections "**SecretScanningService** x5" as prsd #FF8C00
}

card "Gitaly Cluster" as gitaly_cluster {
  collections "**Gitaly** x3" as gitaly #FF8C00
}

card "Database" as database {
  collections "**PGBouncer** x3" as pgbouncer #4EA7FF
}

elb -[#6a9be7]-> gitlab

gitlab -[#32CD32,norank]--> ilb
gitlab .[#32CD32]----> database
gitlab -[hidden]-> consul

sidekiq -[#ff8dd1,norank]--> ilb
sidekiq .[#ff8dd1]----> database
sidekiq -[hidden]-> consul

ilb -[#9370DB]--> prsd_cluster
ilb -[#9370DB]--> gitaly_cluster
ilb -[#9370DB]--> database
ilb -[hidden]u-> consul

consul .[#e76a9b]u-> gitlab
consul .[#e76a9b]u-> sidekiq
consul .[#e76a9b]-> database
consul .[#e76a9b]-> gitaly_cluster
consul .[#e76a9b]-> prsd_cluster

@enduml
```

#### Service Level Indicators (SLIs)

We will adopt the same SLIs followed for [GitLab Applications](../../../../development/application_slis/index.md), that is,
**Apdex score** and **Error Ratio**, plus two additional metrics specific to the service: **Request Latency** and
**Memory Saturation rate**.

#### Service Level Objectives (SLOs)

_We will define threshold limits after obtaining benchmark scores from the RPC service._

### Service Implementation

We will build an RPC service primarily responsible for detecting secrets in the given input blobs, with RPC as the
communication interface. This service will initially be invoked by the Rails monolith when performing change access checks
for Git push events, and eventually extended to other use cases.

To reuse the same scanning business logic, the same project will, in addition to offering the feature as an RPC service,
also include the provision for distributing the feature as a Ruby gem.

#### Language/Tools/Framework

- Ruby `3.2+`
- gRPC framework for serving RPC requests
- [Protobuf Service Definition](https://gitlab.com/gitlab-org/security-products/secret-detection/secret-detection-service/-/raw/main/rpc/secret_detection.proto) file
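
As a purely illustrative sketch of how the Rails side might call such a service: the generated stub and message classes below (`SecretDetection::Grpc::Scanner::Stub`, `ScanRequest`, `Blob`) are assumed names, not the real generated code, which is defined by the Protobuf file above.

```ruby
# Hypothetical client-side call from Rails; class names are assumptions for this sketch.
require 'grpc'

def scan_blobs_for_secrets(blobs, host: 'secret-detection.internal:50051')
  # An insecure channel is shown for brevity; production traffic would use TLS channel credentials.
  stub = SecretDetection::Grpc::Scanner::Stub.new(host, :this_channel_is_insecure)

  request = SecretDetection::Grpc::ScanRequest.new(
    blobs: blobs.map { |blob| SecretDetection::Grpc::Blob.new(id: blob[:id], data: blob[:data]) }
  )

  # The pre-receive check should bound this call with a timeout so a slow scan cannot block the push indefinitely.
  stub.scan(request)
rescue GRPC::BadStatus => e
  # Whether to fail open or closed on scanner errors is a product decision, not shown here.
  Gitlab::AppLogger.warn("secret detection scan skipped: #{e.message}")
  nil
end
```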
### Addendum

- The RPC service should also expose a [Health Check](https://github.com/grpc/grpc/blob/master/doc/health-checking.md) RPC endpoint to ensure the availability of the service.

- Unlike the gem-based approach, we cannot use the [scan within the subprocess](003_run_scan_within_subprocess.md) approach in the RPC server,
  since gRPC [removed support](https://github.com/grpc/grpc/blob/master/doc/fork_support.md) for forking subprocesses. However, we can explore optimizations such as sending batched requests
  concurrently from the RPC client side.

### Reference links

- [Standalone Service as a concept](../../gitlab_ml_experiments/index.md)
- [Runway: Service Deployment & Docs](https://gitlab.com/gitlab-com/gl-infra/platform/runway)
@@ -0,0 +1,104 @@
---
owning-stage: "~devops::secure"
description: "GitLab Secret Detection ADR 005: Use Runway for service deployment"
---

# GitLab Secret Detection ADR 005: Use Runway for service deployment

## Context

The [Secret Detection Service](004_secret_detection_scanner_service.md) requires a strategy for running automated
deployments via the GitLab CI environment.

## Proposed Solution: Runway

We could use [Runway](https://gitlab.com/gitlab-com/gl-infra/platform/runway#runway) - a GitLab internal Platform as a
Service, which aims to enable teams to deploy and run their services quickly and safely.

### Platform Tooling Support

- **Logging**: Logging on the GitLab-managed Elasticsearch/Kibana stack [isn't available](https://gitlab.com/gitlab-com/gl-infra/platform/runway/team/-/issues/84#top)
  in Runway and [there don't seem to be plans](https://gitlab.com/gitlab-com/gl-infra/platform/runway/team/-/issues/84#note_1691419608) to support it anytime soon. At the moment, the workaround is to view
  logs in the [Google Cloud Run UI](https://cloud.google.com/run/docs/logging).

- **Observability**: Runway supports observability for the service by integrating with the monitoring stack. The
  [default metrics](https://docs.runway.gitlab.com/reference/observability/#dashboards) ([example dashboard](https://dashboards.gitlab.net/d/runway-service/runway3a-runway-service-metrics?orgId=1)) provided by Runway cover all the necessary system metrics for
  monitoring.

- **Pager alerts on failures**: Runway generates [alerts](https://docs.runway.gitlab.com/reference/observability/#alerts) for the following anomalies by default, which we believe
  are sufficient to get started with:

  - `Apdex SLO violation`
  - `Error SLO violation`
  - `Traffic absent SLO violation`

- **Service Level Indicators (SLIs)**: The [default metrics](https://docs.runway.gitlab.com/reference/observability/#dashboards) ([example dashboard](https://dashboards.gitlab.net/d/runway-service/runway3a-runway-service-metrics?orgId=1)) provided by Runway cover the
  necessary [SLI requirements](004_secret_detection_scanner_service.md#service-level-indicators-slis).

- **Insights**: We might need additional metrics on rule patterns, like their latency, usage count, and source. We may
  use custom metrics, which we will evaluate further soon.

### Known Limitations (relevant to Secret Detection Service)

- ~~No support for GRPC protocol~~ Update: [GRPC is now supported](https://gitlab.com/gitlab-com/gl-infra/platform/runway/runwayctl/-/merge_requests/421#note_1934369305)
- No support for GitLab Self-Managed environments ([Reference](https://gitlab.com/gitlab-com/gl-infra/platform/runway/team/-/issues/236))

### Working with Limitations

Runway's missing support for Self-Managed (SM) environments made us evaluate other solutions for SM
environments. The [Cloud Connector](../../cloud_connector/index.md)'s API-based approach would generally address the missing deployment solution
for SM environments. However, the Secret Push Protection feature involves frequently transferring large amounts of data between
Gitaly and the service in real time, so REST-based APIs aren't the right fit, as they'd add significant network overhead
compared to streaming data on an RPC request. We could optimize the Cloud Connector approach with some additional complexity, but it is only a matter of time
until Runway introduces a [deployment solution](https://gitlab.com/gitlab-com/gl-infra/platform/runway/team/-/issues/236)
for SM environments. One more [alternative solution](https://gitlab.com/gitlab-org/gitlab/-/issues/462359#note_1913306661) for SM environments was to share the Docker image artifact
along with deployment instructions with customers (similar to the [custom models approach](../../custom_models/index.md#ai-gateway-deployment)), but horizontal
scaling could be a concern.

We came up with a hybrid solution. To address the scale of GitLab SaaS, we will have a dedicated RPC-based Secret
Detection service deployed using [Runway](https://gitlab.com/gitlab-com/gl-infra/platform/runway). This service will isolate the SD resource usage without impacting the
resources of other services (Rails and Gitaly) and can scale independently as needed. For Self-Managed instances,
we will continue using the current gem-based approach, since that approach [performed adequately](https://gitlab.com/gitlab-org/gitlab/-/issues/431076#note_1755614298 "Enable/gather metrics - latency, memory, cpu, etc.")
for up to the [50K Reference architecture](https://gitlab.com/gitlab-org/quality/performance/-/wikis/Benchmarks/Latest/50k). We will eventually migrate Self-Managed environments to Runway when it
introduces the deployment support.

**TL;DR:** We will use the RPC service (deployed using Runway) for GitLab SaaS, and continue using the current Ruby gem
approach for GitLab Self-Managed instances.

To reuse the core implementation of Secret Scanning, we will have a single source code with two different distributions
(a minimal sketch follows the workflow diagram below):

1. Wrap a Ruby gem around the secret detection logic and use it in Rails (replacing the current gem).

1. Wrap an RPC service around the secret detection logic, deploy it using [Runway](https://gitlab.com/gitlab-com/gl-infra/platform/runway), and invoke the service from Rails for GitLab SaaS.
{width="1001" height="311"}

Here's the workflow illustrating the proposed change:

```mermaid
sequenceDiagram
  autonumber
  %% Phase 2: Iter 1
  Gitaly->>+Rails: invokes `internal/allowed`
  Rails->>Rails: Perform project eligibility checks
  alt On project eligibility check failure
    Rails-->>Gitaly: Scanning Skipped
  end
  Rails->>Gitaly: Get blobs
  Gitaly->>Rails: Quarantined Blobs
  Note over Rails,SD Ruby Gem: For GitLab Self-Managed
  Rails->>SD Ruby Gem: Invoke scan and forward quarantined blobs
  SD Ruby Gem->>SD Ruby Gem: Runs Secret Detection on input blobs
  SD Ruby Gem->>Rails: Result
  Note over Rails,SD RPC Service: For GitLab SaaS (GitLab.com & Dedicated)
  Rails->>SD RPC Service: Invoke RPC and forward quarantined blobs
  SD RPC Service->>SD RPC Service: Runs Secret Detection on input blobs
  SD RPC Service->>Rails: Result
  Rails->>Gitaly: Result
```
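
To make the single-source, two-distribution idea concrete, here is a minimal illustrative sketch; every module and class name in it (`SecretDetectionCore`, `Gitlab::SecretDetection`, `ScannerService`, `ScanResponse`) is an assumption for the sketch, not the actual gem or service code.

```ruby
# Illustrative only: one scanning core shared by two thin entry points.
module SecretDetectionCore
  # Runs the rulesets against the given blobs and returns only blobs with findings.
  def self.scan(blobs)
    blobs.map { |blob| { blob_id: blob[:id], findings: detect(blob[:data]) } }
         .reject { |result| result[:findings].empty? }
  end

  def self.detect(text)
    [] # placeholder for the real regex ruleset evaluation
  end
end

# Distribution 1: the Ruby gem, called in-process by Rails on Self-Managed instances.
module Gitlab
  module SecretDetection
    def self.secrets_scan(blobs)
      SecretDetectionCore.scan(blobs)
    end
  end
end

# Distribution 2: the gRPC handler used by the Runway-deployed service for GitLab SaaS.
# `ScannerService::Service` and `ScanResponse` are assumed names generated from the proto file.
class ScannerHandler < ScannerService::Service
  def scan(request, _call)
    results = SecretDetectionCore.scan(request.blobs.map { |b| { id: b.id, data: b.data } })
    ScanResponse.new(results_json: results.to_json)
  end
end
```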
## Reference Links

- [Runway Docs](https://runway.gitlab.com/)
- [Epic: Runway - Platform tooling to support AI Innovation](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/969)
- [Blueprint: GitLab Service-Integration: AI and Beyond](../../../blueprints/gitlab_ml_experiments/index.md)

Binary file not shown. (added image, 516 KiB)
@@ -153,6 +153,8 @@ as self-managed instances.
- [001: Use Ruby Push Check approach within monolith](decisions/001_use_ruby_push_check_approach_within_monolith.md)
- [002: Store the Secret Detection Gem in the same repository](decisions/002_store_the_secret_detection_gem_in_the_same_repository.md)
- [003: Run scan within subprocess](decisions/003_run_scan_within_subprocess.md)
- [004: Standalone Secret Detection Service](decisions/004_secret_detection_scanner_service.md)
- [005: Use Runway for service deployment](decisions/005_use_runway_for_deployment.md)

## Challenges

@@ -174,7 +176,7 @@ In expansion phases we must explore chunking or alternative strategies like the
The detection capability relies on a multiphase rollout, from an experimental component implemented directly in the monolith to a standalone service capable of scanning text blobs generically.

The implementation of the secret scanning service is highly dependent on the outcomes of our benchmarking
and capacity planning against both GitLab.com and our
and capacity planning against both GitLab.com and
[Reference Architectures](../../../administration/reference_architectures/index.md).
As the scanning capability must be an on-by-default component of both our SaaS and self-managed
instances, [each iteration's](#iterations) deployment characteristic defines whether
@@ -306,123 +308,55 @@ located [here](https://gitlab.com/gitlab-org/gitlab/-/blob/2da1c72dbc9df4d913026

More details about the Gem can be found in the [README](https://gitlab.com/gitlab-org/gitlab/-/blob/master/gems/gitlab-secret_detection/README.md) file. Also see [ADR 002](decisions/002_store_the_secret_detection_gem_in_the_same_repository.md) for more on how the Gem code is stored and distributed.

### Phase 2 - Standalone pre-receive service
### Phase 2 - Standalone Secret Detection service

The critical paths as outlined under [goals above](#goals) cover two major object
types: Git text blobs (corresponding to push events) and arbitrary text blobs. In Phase 2,
we continue to focus on Git text blobs.
This phase emphasizes scaling the service outside of the monolith for general availability, isolating the feature's resource
consumption, and ease of maintainability. The critical paths as outlined under [goals above](#goals) cover
two major object types: Git text blobs (corresponding to push events) and arbitrary text blobs. In Phase 2, we continue
to focus on Git text blobs.

This phase emphasizes scaling the service outside of the monolith for general availability and to allow
an on-by-default behavior. The architecture is adapted to provide an isolated and independently
scalable service outside of the Rails monolith.
The responsibility of the service will be limited to running the Secret Detection scan on the given set of input blobs. More
details about the service are outlined in [ADR 004: Secret Detection Scanner Service](decisions/004_secret_detection_scanner_service.md).

In the case of a push detection, the commit is rejected inline and an error is returned to the end user.

#### Configuration

This phase will be considered "generally available" and on-by-default, with disablement configuration through organization-level settings.

#### High-Level Architecture

The Phase 2 architecture involves extracting the secret detection logic into a standalone service
which communicates directly with both the Rails application and Gitaly. This provides a means to scale
the secret detection nodes independently, and reduce resource usage overhead on the Rails application.

Scans still run synchronously as a (potentially) blocking pre-receive transaction. The blob size remains limited to 1MB.

Note that the node count is purely illustrative, but serves to emphasize the independent scaling requirements for the scanning service.
```plantuml
|
||||
|
||||
@startuml Phase2
|
||||
skinparam linetype ortho
|
||||
|
||||
card "**External Load Balancer**" as elb #6a9be7
|
||||
card "**Internal Load Balancer**" as ilb #9370DB
|
||||
|
||||
together {
|
||||
collections "**GitLab Rails** x3" as gitlab #32CD32
|
||||
collections "**Sidekiq** x3" as sidekiq #ff8dd1
|
||||
}
|
||||
|
||||
together {
|
||||
collections "**Consul** x3" as consul #e76a9b
|
||||
}
|
||||
|
||||
card "SecretScanningService Cluster" as prsd_cluster {
|
||||
collections "**SecretScanningService** x5" as prsd #FF8C00
|
||||
}
|
||||
|
||||
card "Gitaly Cluster" as gitaly_cluster {
|
||||
collections "**Gitaly** x3" as gitaly #FF8C00
|
||||
}
|
||||
|
||||
card "Database" as database {
|
||||
collections "**PGBouncer** x3" as pgbouncer #4EA7FF
|
||||
}
|
||||
|
||||
elb -[#6a9be7]-> gitlab
|
||||
|
||||
gitlab -[#32CD32,norank]--> ilb
|
||||
gitlab .[#32CD32]----> database
|
||||
gitlab -[hidden]-> consul
|
||||
|
||||
sidekiq -[#ff8dd1,norank]--> ilb
|
||||
sidekiq .[#ff8dd1]----> database
|
||||
sidekiq -[hidden]-> consul
|
||||
|
||||
ilb -[#9370DB]--> prsd_cluster
|
||||
ilb -[#9370DB]--> gitaly_cluster
|
||||
ilb -[#9370DB]--> database
|
||||
ilb -[hidden]u-> consul
|
||||
|
||||
consul .[#e76a9b]u-> gitlab
|
||||
consul .[#e76a9b]u-> sidekiq
|
||||
consul .[#e76a9b]-> database
|
||||
consul .[#e76a9b]-> gitaly_cluster
|
||||
consul .[#e76a9b]-> prsd_cluster
|
||||
|
||||
@enduml
|
||||
```

#### Push Event Detection Flow

The introduction of a dedicated service impacts the workflow for Secret Push Protection as follows:

```mermaid
|
||||
sequenceDiagram
|
||||
autonumber
|
||||
actor User
|
||||
User->>+Workhorse: git push with-secret
|
||||
Workhorse->>+Gitaly: tcp
|
||||
Gitaly->>+GitLabSecretDetection: PreReceive
|
||||
GitLabSecretDetection->>-Gitaly: ListAllBlobs
|
||||
Gitaly->>-GitLabSecretDetection: ListAllBlobsResponse
|
||||
|
||||
Gitaly->>+GitLabSecretDetection: PreReceive
|
||||
|
||||
GitLabSecretDetection->>GitLabSecretDetection: Scan(blob)
|
||||
GitLabSecretDetection->>-Gitaly: found
|
||||
|
||||
Gitaly->>+Rails: PreReceive
|
||||
|
||||
Rails->>User: rejected: secret found
|
||||
|
||||
User->>+Workhorse: git push without-secret
|
||||
Workhorse->>+Gitaly: tcp
|
||||
Gitaly->>+GitLabSecretDetection: PreReceive
|
||||
GitLabSecretDetection->>-Gitaly: ListAllBlobs
|
||||
Gitaly->>-GitLabSecretDetection: ListAllBlobsResponse
|
||||
|
||||
Gitaly->>+GitLabSecretDetection: PreReceive
|
||||
|
||||
GitLabSecretDetection->>GitLabSecretDetection: Scan(blob)
|
||||
GitLabSecretDetection->>-Gitaly: not_found
|
||||
|
||||
Gitaly->>+Rails: PreReceive
|
||||
|
||||
Rails->>User: accepted
|
||||
%% Phase 2: Iter 1
|
||||
Gitaly->>+Rails: invokes `/internal/allowed` API endpoint
|
||||
Rails->>Rails: Perform project eligibility checks
|
||||
alt On access check failure
|
||||
Rails-->>Gitaly: Scanning Skipped
|
||||
end
|
||||
Rails->>Gitaly: Fetch blobs
|
||||
Gitaly->>Rails: Quarantined Blobs
|
||||
Rails->>Secret Detection Service: Invoke scan by embedding blobs
|
||||
Secret Detection Service->>Secret Detection Service: Runs Secret Detection on input blobs
|
||||
Secret Detection Service->>Rails: Result
|
||||
Rails->>Gitaly: Result
|
||||
```
|
||||
|
||||
### Phase 3 - Expansion beyond pre-receive service
The Secret Detection service addresses the previous phase's limitations of feature scalability and shared-resource
consumption. However, the Secret Push Protection workflow still requires the Rails monolith to load a large amount of
Git blobs fetched from Gitaly into its own memory before passing them down to the Secret Detection Service.

### Phase 2.1 - Invoke Push Protection directly from Gitaly

Up to the previous phase, multiple hops are made between Gitaly and Rails to run pre-receive checks,
particularly for Secret Push Protection, so a fairly large amount of Rails memory is occupied by Git blobs held only to
pass them to the Gem/Service for the secret scan. This problem can be mitigated through a direct interaction between
the Secret Detection service and Gitaly via a standard interface (either a [custom pre-receive hook](../../../administration/server_hooks.md#create-global-server-hooks-for-all-repositories)
or Gitaly's new [Plugin-based architecture](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/143582)). This setup
removes the need for Rails to act as a blob messenger between Gitaly and the service.

Gitaly's new [Plugin-based architecture](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/143582) is the
preferred interface between Gitaly and the RPC service as it provides streamlined access to the Git blob
repository. However, the Gitaly team has not yet taken it up for development.

_More details on Phase 2.1 will be added once there are updates on the development of the Plugin architecture._
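
For illustration only, the custom server-hook option mentioned above could look roughly like the sketch below: a global `pre-receive` executable that Gitaly runs on push and that forwards the pushed revisions to the scanning service, rejecting the push on a finding. Only the standard Git pre-receive contract (stdin lines of `<old-sha> <new-sha> <refname>`, non-zero exit to reject) and the `GL-HOOK-ERR:` message prefix are givens; `SecretDetectionClient` and its API are assumptions.

```ruby
#!/usr/bin/env ruby
# Hypothetical global pre-receive server hook; not the planned Gitaly plugin integration.

# Git provides one "<old-sha> <new-sha> <refname>" line per updated ref on stdin.
updates = $stdin.readlines.map(&:split)
new_revisions = updates.map { |_old_sha, new_sha, _ref| new_sha }

# Resolving revisions to blobs would use `git rev-list`/`git cat-file` against the
# quarantine directory; the client below is an assumed wrapper around the RPC service.
findings = SecretDetectionClient.scan_revisions(new_revisions)

if findings.any?
  # Lines prefixed with GL-HOOK-ERR: are shown to the user who pushed.
  warn 'GL-HOOK-ERR: Push rejected: possible secrets detected.'
  findings.each { |finding| warn "GL-HOOK-ERR: #{finding}" }
  exit 1
end

exit 0
```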
### Phase 3 - Expansion beyond Push Protection service

The detection flow for arbitrary text blobs, such as issue comments, relies on
subscribing to `Notes::PostProcessService` (or equivalent service) to enqueue

@@ -452,7 +386,7 @@ There is no change to the architecture defined in Phase 2, however the individua
#### Push Event Detection Flow

There is no change to the push event detection flow defined in Phase 2, however the added capability to scan
arbitary text blobs directly from Rails allows us to emulate a pre-receive behavior for issuable creations,
arbitrary text blobs directly from Rails allows us to emulate a pre-receive behavior for issuable creations,
as well (see [target types](#target-types) for priority object types).

```mermaid
@@ -31,6 +31,15 @@ you have removed any orphaned rows. The method `add_concurrent_foreign_key`
does not take care of this so you must do so manually. See
[adding foreign key constraint to an existing column](add_foreign_key_to_existing_column.md).

## Use bigint for foreign keys

When adding a new foreign key, you should define it as `bigint`.
Even if the referenced table has an `integer` primary key type,
you must reference the new foreign key as `bigint`. As we are
migrating all primary keys to `bigint`, using `bigint` foreign keys
saves time, and requires fewer steps, when migrating the parent table
to `bigint` primary keys.
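
For example, a migration adding a new foreign key column to an `integer`-keyed table still declares the column as `bigint`. The sketch below is illustrative (the `widgets` and `legacy_projects` tables are made up); it uses the `add_concurrent_foreign_key` helper mentioned above together with the standard GitLab migration helpers.

```ruby
# frozen_string_literal: true

class AddLegacyProjectIdToWidgets < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!
  milestone '17.1'

  def up
    # bigint even though legacy_projects.id is (still) an integer primary key.
    add_column :widgets, :legacy_project_id, :bigint unless column_exists?(:widgets, :legacy_project_id)

    add_concurrent_index :widgets, :legacy_project_id
    add_concurrent_foreign_key :widgets, :legacy_projects, column: :legacy_project_id, on_delete: :cascade
  end

  def down
    remove_column :widgets, :legacy_project_id
  end
end
```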
## Updating foreign keys in migrations

Sometimes a foreign key constraint must be changed, preserving the column
@ -1123,13 +1123,8 @@ To describe multiple fields, use unordered list items:
|
|||
|
||||
## Illustrations
|
||||
|
||||
Use illustrations only to supplement text, not replace it.
|
||||
|
||||
Illustrations can help the reader understand:
|
||||
|
||||
- A concept.
|
||||
- Where they are in a complicated process.
|
||||
- How they should interact with the application.
|
||||
Illustrations can help the reader understand a concept, where they are in a complicated process,
|
||||
or how they should interact with the application.
|
||||
|
||||
Use illustrations sparingly because:
|
||||
|
||||
|
|
@ -1139,67 +1134,10 @@ Use illustrations sparingly because:
|
|||
|
||||
Types of illustrations used in GitLab documentation are:
|
||||
|
||||
- Diagram. Use a diagram to illustrate a process or the relationship between entities, for example.
|
||||
- Screenshot. Use a screenshot when you need to show a portion of the GitLab user interface.
|
||||
- Diagram. Use a diagram to illustrate a process or the relationship between entities, for example.
|
||||
|
||||
Use a diagram instead of a screenshot when possible because:
|
||||
|
||||
- A diagram's file size is usually much smaller than that of a screenshot.
|
||||
- A screenshot often needs to be compressed, which generally reduces the image's quality.
|
||||
- A diagram in SVG format can be displayed at any size without affecting the image's quality.
|
||||
|
||||
### Diagram
|
||||
|
||||
Use a diagram to illustrate a process or the relationship between entities, for example.
|
||||
|
||||
Use [Mermaid](https://mermaid.js.org/#/) to create a diagram. This method has several advantages
|
||||
over a static image format (screenshot):
|
||||
|
||||
- The Mermaid format is easier to maintain because:
|
||||
- Their definition is stored as a code block in the documentation's Markdown source.
|
||||
- The diagram is rendered dynamically at runtime.
|
||||
- Text content that may change over time, such as feature names, can be found using text search
|
||||
tools and edited.
|
||||
- The diagram is rendered as a scalable image, better suited to various output devices and sizes.
|
||||
|
||||
#### Create a diagram
|
||||
|
||||
To create a diagram:
|
||||
|
||||
1. Use the [Mermaid Live Editor](https://mermaid.live/) to create the diagram.
|
||||
1. Copy the content of the **Code** pane into a `mermaid` code block in the Markdown file. For more
|
||||
details, see [Mermaid](../../../user/markdown.md#mermaid).
|
||||
1. Optional. To add GitLab font styling to your diagram, add this line between the Mermaid
|
||||
code block declaration and the type of diagram:
|
||||
|
||||
```plaintext
|
||||
%%{init: { "fontFamily": "GitLab Sans" }}%%
|
||||
```
|
||||
|
||||
1. To improve accessibility of diagrams, add a title and description. Add these lines on the next
|
||||
line after declaring the type of diagram, like `flowchart` or `sequenceDiagram`:
|
||||
|
||||
```yaml
|
||||
accTitle: your diagram title here
|
||||
accDescr: describe what your diagram does in a single sentence, with no line breaks.
|
||||
```
|
||||
|
||||
For example, this flowchart contains both accessibility and font information:
|
||||
|
||||
````markdown
|
||||
```mermaid
|
||||
%%{init: { "fontFamily": "GitLab Sans" }}%%
|
||||
flowchart TD
|
||||
accTitle: Example diagram title
|
||||
accDescr: A description of your diagram
|
||||
|
||||
A[Start here] -->|action| B[next step]
|
||||
```
|
||||
````
|
||||
|
||||
The Mermaid diagram syntax can be difficult to learn. To make this a little easier, see the Mermaid
|
||||
[Beginner's Guide](https://mermaid.js.org/intro/getting-started.html) and the examples on the
|
||||
Mermaid site.
|
||||
Use illustrations only to supplement text, not replace it.
|
||||
|
||||
### Screenshot
|
||||
|
||||
|
|
@ -1371,6 +1309,59 @@ You can take a screenshot of a single element.
|
|||
|
||||
Use `spec/docs_screenshots/container_registry_docs.rb` as a guide to create your own scripts.
|
||||
|
||||
### Diagram
|
||||
|
||||
Use a diagram to illustrate a process or the relationship between entities, for example.
|
||||
|
||||
Use [Mermaid](https://mermaid.js.org/#/) to create a diagram. This method has several advantages
|
||||
over a static image format (screenshot):
|
||||
|
||||
- The Mermaid format is easier to maintain because:
|
||||
- Their definition is stored as a code block in the documentation's Markdown source.
|
||||
- The diagram is rendered dynamically at runtime.
|
||||
- Text content that may change over time, such as feature names, can be found using text search
|
||||
tools and edited.
|
||||
- The diagram is rendered as a scalable image, better suited to various output devices and sizes.
|
||||
|
||||
#### Create a diagram
|
||||
|
||||
To create a diagram:
|
||||
|
||||
1. Use the [Mermaid Live Editor](https://mermaid.live/) to create the diagram.
|
||||
1. Copy the content of the **Code** pane into a `mermaid` code block in the Markdown file. For more
|
||||
details, see [Mermaid](../../../user/markdown.md#mermaid).
|
||||
1. Optional. To add GitLab font styling to your diagram, add this line between the Mermaid
|
||||
code block declaration and the type of diagram:
|
||||
|
||||
```plaintext
|
||||
%%{init: { "fontFamily": "GitLab Sans" }}%%
|
||||
```
|
||||
|
||||
1. To improve accessibility of diagrams, add a title and description. Add these lines on the next
|
||||
line after declaring the type of diagram, like `flowchart` or `sequenceDiagram`:
|
||||
|
||||
```yaml
|
||||
accTitle: your diagram title here
|
||||
accDescr: describe what your diagram does in a single sentence, with no line breaks.
|
||||
```
|
||||
|
||||
For example, this flowchart contains both accessibility and font information:
|
||||
|
||||
````markdown
|
||||
```mermaid
|
||||
%%{init: { "fontFamily": "GitLab Sans" }}%%
|
||||
flowchart TD
|
||||
accTitle: Example diagram title
|
||||
accDescr: A description of your diagram
|
||||
|
||||
A[Start here] -->|action| B[next step]
|
||||
```
|
||||
````
|
||||
|
||||
The Mermaid diagram syntax can be difficult to learn. To make this a little easier, see the Mermaid
|
||||
[Beginner's Guide](https://mermaid.js.org/intro/getting-started.html) and the examples on the
|
||||
Mermaid site.
|
||||
|
||||
## Emoji
|
||||
|
||||
Don't use the Markdown emoji format, for example `:smile:`, for any purpose. Use
|
||||
|
|
|
|||
|
|
@ -440,6 +440,10 @@ In this illustration, you can see that the Apdex score started to decline after
|
|||
|
||||

|
||||
|
||||
Certain features necessitate extensive monitoring over multiple days, particularly those that are high-risk and critical to business operations. In contrast, other features may only require a 24-hour monitoring period before continuing with the rollout.
|
||||
|
||||
It is recommended to determine the necessary extent of monitoring before initiating the rollout.
|
||||
|
||||
### Feature flag change logging
|
||||
|
||||
#### ChatOps level
|
||||
|
|
|
|||
|
|
@ -725,7 +725,7 @@ The following are some available Rake tasks:
|
|||
| Task | Description |
|
||||
|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| [`sudo gitlab-rake gitlab:elastic:info`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Outputs debugging information for the advanced search integration. |
|
||||
| [`sudo gitlab-rake gitlab:elastic:index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Enables Elasticsearch indexing and runs `gitlab:elastic:recreate_index`, `gitlab:elastic:clear_index_status`, `gitlab:elastic:index_group_entities`, `gitlab:elastic:index_projects`, `gitlab:elastic:index_snippets`, and `gitlab:elastic:index_users`. |
|
||||
| [`sudo gitlab-rake gitlab:elastic:index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | In GitLab 17.0 and earlier, enables Elasticsearch indexing and runs `gitlab:elastic:recreate_index`, `gitlab:elastic:clear_index_status`, `gitlab:elastic:index_group_entities`, `gitlab:elastic:index_projects`, `gitlab:elastic:index_snippets`, and `gitlab:elastic:index_users`.<br>In GitLab 17.1 and later, queues a Sidekiq job in the background. First, the job enables Elasticsearch indexing and pauses indexing to ensure all indices are created. Then, the job re-creates all indices, clears indexing status, and queues additional Sidekiq jobs to index project and group data, snippets, and users. Finally, Elasticsearch indexing is resumed to complete. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/421298) in GitLab 17.1 [with a flag](../../administration/feature_flags.md) named `elastic_index_use_trigger_indexing`. Enabled by default. |
|
||||
| [`sudo gitlab-rake gitlab:elastic:pause_indexing`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Pauses Elasticsearch indexing. Changes are still tracked. Useful for cluster/index migrations. |
|
||||
| [`sudo gitlab-rake gitlab:elastic:resume_indexing`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Resumes Elasticsearch indexing. |
|
||||
| [`sudo gitlab-rake gitlab:elastic:index_projects`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Iterates over all projects, and queues Sidekiq jobs to index them in the background. It can only be used after the index is created. |
|
||||
|
|
|
|||
|
|
@ -11,19 +11,24 @@ module Banzai
|
|||
def parent_records(parent, ids)
|
||||
return Label.none unless parent.is_a?(Project) || parent.is_a?(Group)
|
||||
|
||||
labels = find_labels(parent)
|
||||
label_ids = ids.map { |y| y[:label_id] }.compact
|
||||
relation = []
|
||||
|
||||
unless label_ids.empty?
|
||||
id_relation = labels.where(id: label_ids)
|
||||
# We need to handle relative and absolute paths separately
|
||||
labels_absolute_indexed = ids.group_by { |id| id[:absolute_path] }
|
||||
labels_absolute_indexed.each do |absolute_path, fitered_ids|
|
||||
label_ids = fitered_ids&.pluck(:label_id)&.compact
|
||||
|
||||
if label_ids.present?
|
||||
relation << find_labels(parent, absolute_path: absolute_path).where(id: label_ids)
|
||||
end
|
||||
|
||||
label_names = fitered_ids&.pluck(:label_name)&.compact
|
||||
if label_names.present?
|
||||
relation << find_labels(parent, absolute_path: absolute_path).where(name: label_names)
|
||||
end
|
||||
end
|
||||
|
||||
label_names = ids.map { |y| y[:label_name] }.compact
|
||||
unless label_names.empty?
|
||||
label_relation = labels.where(title: label_names)
|
||||
end
|
||||
|
||||
relation = [id_relation, label_relation].compact
|
||||
relation.compact!
|
||||
return Label.none if relation.all?(Label.none)
|
||||
|
||||
Label.from_union(relation)
|
||||
|
|
@ -46,7 +51,13 @@ module Banzai
|
|||
# or the label_name, but not both. But below, we have both pieces of information.
|
||||
# But it's accounted for in `find_object`
|
||||
def parse_symbol(symbol, match_data)
|
||||
{ label_id: match_data[:label_id]&.to_i, label_name: match_data[:label_name]&.tr('"', '') }
|
||||
absolute_path = !!match_data&.named_captures&.fetch('absolute_path')
|
||||
|
||||
{
|
||||
label_id: match_data[:label_id]&.to_i,
|
||||
label_name: match_data[:label_name]&.tr('"', ''),
|
||||
absolute_path: absolute_path
|
||||
}
|
||||
end
|
||||
|
||||
# We assume that most classes are identifying records by ID.
|
||||
|
|
@ -78,27 +89,36 @@ module Banzai
|
|||
escape_with_placeholders(unescaped_html, labels)
|
||||
end
|
||||
|
||||
def find_labels(parent)
|
||||
params = if parent.is_a?(Group)
|
||||
{ group_id: parent.id,
|
||||
include_ancestor_groups: true,
|
||||
only_group_labels: true }
|
||||
else
|
||||
{ project: parent,
|
||||
include_ancestor_groups: true }
|
||||
end
|
||||
def find_labels(parent, absolute_path: false)
|
||||
params = label_finder_params(parent, absolute_path)
|
||||
|
||||
LabelsFinder.new(nil, params).execute(skip_authorization: true)
|
||||
end
|
||||
|
||||
def label_finder_params(parent, absolute_path)
|
||||
params = if parent.is_a?(Group)
|
||||
{ group_id: parent.id, only_group_labels: true }
|
||||
else
|
||||
{ project: parent }
|
||||
end
|
||||
|
||||
params[:include_ancestor_groups] = !absolute_path
|
||||
|
||||
params
|
||||
end
|
||||
|
||||
def url_for_object(label, parent)
|
||||
label_url_method =
|
||||
if context[:label_url_method]
|
||||
context[:label_url_method]
|
||||
elsif parent.is_a?(Project)
|
||||
:project_issues_url
|
||||
elsif parent.is_a?(Group)
|
||||
:issues_group_url
|
||||
end
|
||||
|
||||
label_url_method = :issues_group_url if parent.is_a?(Group) && label_url_method == :project_issues_url
|
||||
|
||||
return unless label_url_method
|
||||
|
||||
Gitlab::Routing.url_helpers.public_send(label_url_method, parent, label_name: label.name, only_path: context[:only_path]) # rubocop:disable GitlabSecurity/PublicSend
|
||||
|
|
@ -108,7 +128,7 @@ module Banzai
|
|||
label_suffix = ''
|
||||
parent = project || group
|
||||
|
||||
if project || full_path_ref?(matches)
|
||||
if matches[:absolute_path].blank? && (project || full_path_ref?(matches))
|
||||
project_path = reference_cache.full_project_path(matches[:namespace], matches[:project], matches)
|
||||
parent_from_ref = from_ref_cached(project_path)
|
||||
reference = parent_from_ref.to_human_reference(parent)
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ module Gitlab
|
|||
# table_name can include schema name as a prefix. For example: 'gitlab_partitions_static.events_03',
|
||||
# otherwise, it will default to current used schema, for example 'public'.
|
||||
def initialize(table_name:, connection:, database_name:, with_retries: true, logger: nil, dry_run: false)
|
||||
@table_name = table_name
|
||||
@table_name = table_name.to_s
|
||||
@connection = connection
|
||||
@database_name = database_name
|
||||
@logger = logger
|
||||
|
|
@ -36,6 +36,11 @@ module Gitlab
|
|||
end
|
||||
|
||||
def lock_writes
|
||||
unless table_exist?
|
||||
logger&.info "Skipping lock_writes, because #{table_name} does not exist"
|
||||
return result_hash(action: 'skipped')
|
||||
end
|
||||
|
||||
if table_locked_for_writes?
|
||||
logger&.info "Skipping lock_writes, because #{table_name} is already locked for writes"
|
||||
return result_hash(action: 'skipped')
|
||||
|
|
@ -74,6 +79,18 @@ module Gitlab
|
|||
|
||||
attr_reader :table_name, :connection, :database_name, :logger, :dry_run, :table_name_without_schema, :with_retries
|
||||
|
||||
def table_exist?
|
||||
where = if table_name.include?('.')
|
||||
schema, table = table_name.split('.')
|
||||
|
||||
"#{Arel.sql('table_name').eq(table).to_sql} AND #{Arel.sql('table_schema').eq(schema).to_sql}"
|
||||
else
|
||||
"#{Arel.sql('table_name').eq(table_name).to_sql} AND table_schema = current_schema()"
|
||||
end
|
||||
|
||||
@connection.execute("SELECT table_name FROM information_schema.tables WHERE #{where}").any?
|
||||
end
|
||||
|
||||
def process_query(sql, action)
|
||||
if dry_run
|
||||
logger&.info sql
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ module RemoteDevelopment
|
|||
# err_result will be set to a non-nil Result.err if type check fails
|
||||
err_result = Result.err(SettingsCurrentSettingsReadFailed.new(
|
||||
details: "Gitlab::CurrentSettings.#{setting_name} type of '#{current_setting_value.class}' " \
|
||||
"did not match initialized Remote Development Settings type of '#{setting_type}'." # rubocop:disable Layout/LineEndStringConcatenationIndentation -- use default RubyMine formatting
|
||||
"did not match initialized Remote Development Settings type of '#{setting_type}'."
|
||||
))
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -3263,6 +3263,9 @@ msgstr ""
|
|||
msgid "Add topics to projects to help users find them."
|
||||
msgstr ""
|
||||
|
||||
msgid "Add variable"
|
||||
msgstr ""
|
||||
|
||||
msgid "Add vulnerability finding"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -34803,6 +34806,9 @@ msgstr ""
|
|||
msgid "No user provided"
|
||||
msgstr ""
|
||||
|
||||
msgid "No variables"
|
||||
msgstr ""
|
||||
|
||||
msgid "No vulnerabilities present"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -43717,6 +43723,9 @@ msgstr ""
|
|||
msgid "Remove user from project"
|
||||
msgstr ""
|
||||
|
||||
msgid "Remove variable"
|
||||
msgstr ""
|
||||
|
||||
msgid "Remove weight"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -905,12 +905,6 @@ RSpec.describe 'Pipeline', :js, feature_category: :continuous_integration do
|
|||
end
|
||||
|
||||
context 'when build requires resource', :sidekiq_inline do
|
||||
before do
|
||||
allow_next_instance_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService) do |resource_service|
|
||||
allow(resource_service).to receive(:respawn_assign_resource_worker)
|
||||
end
|
||||
end
|
||||
|
||||
let_it_be(:project) { create(:project, :repository) }
|
||||
|
||||
let(:pipeline) { create(:ci_pipeline, project: project) }
|
||||
|
|
|
|||
|
|
@ -672,9 +672,11 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
|
|||
end
|
||||
|
||||
describe 'group context' do
|
||||
let_it_be(:group) { create(:group) }
|
||||
let_it_be(:subgroup) { create(:group, parent: group) }
|
||||
let_it_be(:label) { create(:group_label, group: group) }
|
||||
|
||||
it 'points to the page defined in label_url_method' do
|
||||
group = create(:group)
|
||||
label = create(:group_label, group: group)
|
||||
reference = "~#{label.name}"
|
||||
|
||||
result = reference_filter("See #{reference}", { project: nil, group: group, label_url_method: :group_url } )
|
||||
|
|
@ -683,9 +685,6 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
|
|||
end
|
||||
|
||||
it 'finds labels also in ancestor groups' do
|
||||
group = create(:group)
|
||||
label = create(:group_label, group: group)
|
||||
subgroup = create(:group, parent: group)
|
||||
reference = "~#{label.name}"
|
||||
|
||||
result = reference_filter("See #{reference}", { project: nil, group: subgroup, label_url_method: :group_url } )
|
||||
|
|
@@ -698,13 +697,121 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
      label = create(:label, project: project)
      reference = "#{project.full_path}~#{label.name}"

      result = reference_filter("See #{reference}", { project: nil, group: create(:group) } )
      result = reference_filter("See #{reference}", { project: nil, group: group } )

      expect(result.css('a').first.attr('href')).to eq(urls.project_issues_url(project, label_name: label.name))
      expect(result.css('a').first.text).to eq "#{label.name} in #{project.full_name}"
    end
  end

  shared_examples 'absolute group reference' do
    it 'supports absolute reference' do
      absolute_reference = "/#{reference}"

      result = reference_filter("See #{absolute_reference}", context)

      if context[:label_url_method] == :group_url
        expect(result.css('a').first.attr('href')).to eq(urls.group_url(group, label_name: group_label.name))
      else
        expect(result.css('a').first.attr('href')).to eq(urls.issues_group_url(group, label_name: group_label.name))
      end

      expect(result.css('a').first.attr('data-original')).to eq absolute_reference
      expect(result.content).to eq "See #{group_label.name}"
    end
  end

  shared_examples 'absolute project reference' do
    it 'supports absolute reference' do
      absolute_reference = "/#{reference}"

      result = reference_filter("See #{absolute_reference}", context)

      if context[:label_url_method] == :project_merge_requests_url
        expect(result.css('a').first.attr('href')).to eq(urls.project_merge_requests_url(project, label_name: project_label.name))
      else
        expect(result.css('a').first.attr('href')).to eq(urls.project_issues_url(project, label_name: project_label.name))
      end

      expect(result.css('a').first.attr('data-original')).to eq absolute_reference
      expect(result.content).to eq "See #{project_label.name}"
    end
  end

  describe 'absolute label references' do
    let_it_be(:parent_group) { create(:group) }
    let_it_be(:group) { create(:group, parent: parent_group) }
    let_it_be(:project) { create(:project, :public, group: group) }
    let_it_be(:project_label) { create(:label, project: project) }
    let_it_be(:group_label) { create(:group_label, group: group) }
    let_it_be(:parent_group_label) { create(:group_label, group: parent_group) }
    let_it_be(:another_parent_group) { create(:group) }
    let_it_be(:another_group) { create(:group, parent: another_parent_group) }
    let_it_be(:another_project) { create(:project, :public, group: another_group) }

    context 'with a project label' do
      let(:reference) { "#{project.full_path}~#{project_label.name}" }

      it_behaves_like 'absolute project reference' do
        let(:context) { { project: project } }
      end

      it_behaves_like 'absolute project reference' do
        let(:context) { { project: project, label_url_method: :project_merge_requests_url } }
      end
    end

    context 'with a group label' do
      let_it_be(:reference) { "#{group.full_path}~#{group_label.name}" }

      it_behaves_like 'absolute group reference' do
        let(:context) { { project: nil, group: group } }
      end

      it_behaves_like 'absolute group reference' do
        let(:context) { { project: nil, group: group, label_url_method: :group_url } }
      end
    end

    describe 'cross-project absolute reference' do
      let_it_be(:context) { { project: another_project } }

      # a normal cross-project label works fine. So check just the absolute version.
      it_behaves_like 'absolute project reference' do
        let_it_be(:reference) { "#{project.full_path}~#{project_label.name}" }
      end

      it 'does not find label in ancestors' do
        reference = "/#{project.full_path}~#{parent_group_label.name}"
        result = reference_filter("See #{reference}", context)

        expect(result.to_html).to eq "See #{reference}"
      end
    end

    describe 'cross-group absolute reference' do
      let_it_be(:context) { { project: nil, group: another_group } }

      it 'can not find the label' do
        reference = "#{group.full_path}~#{group_label.name}"
        result = reference_filter("See #{reference}", context)

        expect(result.to_html).to eq "See #{reference}"
      end

      it_behaves_like 'absolute group reference' do
        let_it_be(:reference) { "#{group.full_path}~#{group_label.name}" }
      end

      it 'does not find label in ancestors' do
        reference = "/#{group.full_path}~#{parent_group_label.name}"
        result = reference_filter("See #{reference}", context)

        expect(result.to_html).to eq "See #{reference}"
      end
    end
  end

  context 'checking N+1' do
    let_it_be(:group) { create(:group) }
    let_it_be(:group2) { create(:group) }

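The 'absolute' form exercised by the shared examples above is just the normal label reference with a leading slash, which anchors the lookup to the named project or group instead of walking up from the rendering context. A rough illustration of the two shapes (the path and label name here are made up):

# Hypothetical values, for illustration only.
project_path = 'acme/website'
label_name = 'bug'

relative_reference = "#{project_path}~#{label_name}"  # "acme/website~bug" - resolved against the current context
absolute_reference = "/#{project_path}~#{label_name}" # "/acme/website~bug" - the form the new specs cover
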
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :cell do
  let(:connection) { ApplicationRecord.connection }
  let(:test_table) { '_test_table' }
  let(:non_existent_table) { 'non_existent' }
  let(:skip_table_creation) { false }
  let(:logger) { instance_double(Logger) }
  let(:dry_run) { false }

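A brief aside on `instance_double(Logger)` above: it creates an RSpec verified double, so only messages that `Logger` really defines can be stubbed or expected, which keeps the spec honest if the logging interface ever changes. A minimal sketch of that behavior:

require 'logger'

logger = instance_double(Logger)

allow(logger).to receive(:info)    # allowed - Logger#info exists
# allow(logger).to receive(:shout) # would raise, because Logger does not implement #shout
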
@@ -23,16 +25,18 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :
    allow(connection).to receive(:execute).and_call_original
    allow(logger).to receive(:info)

    connection.execute(<<~SQL)
      CREATE TABLE #{test_table} (id integer NOT NULL, value integer NOT NULL DEFAULT 0);
    unless skip_table_creation
      connection.execute(<<~SQL)
        CREATE TABLE #{test_table} (id integer NOT NULL, value integer NOT NULL DEFAULT 0);

      INSERT INTO #{test_table} (id, value)
      VALUES (1, 1), (2, 2), (3, 3)
    SQL
        INSERT INTO #{test_table} (id, value)
        VALUES (1, 1), (2, 2), (3, 3)
      SQL
    end
  end

  after do
    ApplicationRecord.connection.execute("DROP TABLE IF EXISTS #{test_table}")
    ApplicationRecord.connection.execute("DROP TABLE IF EXISTS #{test_table}") unless skip_table_creation
  end

  describe "#table_locked_for_writes?" do

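The `<<~SQL` used in this setup is Ruby's squiggly heredoc: it strips the common leading indentation from the body, so the statements can be nested inside the `unless` block without that indentation leaking into the SQL. A small standalone sketch:

table_name = '_test_table' # placeholder, mirroring the let above

sql = <<~SQL
  CREATE TABLE #{table_name} (id integer NOT NULL, value integer NOT NULL DEFAULT 0);
  INSERT INTO #{table_name} (id, value) VALUES (1, 1), (2, 2), (3, 3)
SQL

puts sql # indentation is removed and interpolation works, exactly as in the before block above
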
@@ -125,6 +129,23 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :
      }
    end

    context 'when table does not exist' do
      let(:skip_table_creation) { true }
      let(:test_table) { non_existent_table }

      it 'skips locking table' do
        expect(logger).to receive(:info).with("Skipping lock_writes, because #{test_table} does not exist")
        expect(connection).not_to receive(:execute).with(/CREATE TRIGGER/)

        expect do
          result = subject.lock_writes
          expect(result).to eq({ action: "skipped", database: "main", dry_run: false, table: test_table })
        end.not_to change {
          number_of_triggers_on(connection, test_table)
        }
      end
    end

    context 'when running in dry_run mode' do
      let(:dry_run) { true }

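The behavior these new examples pin down - returning a "skipped" result and creating no triggers when the table is missing - amounts to an existence guard at the top of the lock call. A minimal sketch under that assumption (illustrative only, not the actual LockWritesManager implementation; attribute names mirror the expectations above):

def lock_writes
  unless connection.table_exists?(table_name)
    logger.info("Skipping lock_writes, because #{table_name} does not exist")
    return { action: "skipped", database: database_name, dry_run: dry_run, table: table_name }
  end

  # ...otherwise install the write-blocking trigger as before...
end
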
@@ -206,6 +227,20 @@ RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :
      subject.unlock_writes
    end

    context 'when table does not exist' do
      let(:skip_table_creation) { true }
      let(:test_table) { non_existent_table }

      it 'skips unlocking table' do
        subject.unlock_writes

        expect(subject).not_to receive(:execute_sql_statement)
        expect(subject.unlock_writes).to eq(
          { action: "skipped", database: "main", dry_run: dry_run, table: test_table }
        )
      end
    end

    context 'when running in dry_run mode' do
      let(:dry_run) { true }

@@ -131,8 +131,6 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager, feature_categor
      create_partitioned_table(connection, partitioned_table_name)

      stub_feature_flags(automatic_lock_writes_on_partition_tables: ff_enabled)

      sync_partitions
    end

    where(:gitlab_schema, :database, :expectation) do

@@ -0,0 +1,158 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe SetMissingTraversalIdsForVulnerabilityReads, feature_category: :vulnerability_management do
  let(:vulnerability_reads) { table(:vulnerability_reads) }
  let(:vulnerability_scanners) { table(:vulnerability_scanners) }
  let(:identifiers) { table(:vulnerability_identifiers) }
  let(:vulnerabilities) { table(:vulnerabilities) }
  let(:findings) { table(:vulnerability_occurrences) }
  let(:namespaces) { table(:namespaces) }
  let(:projects) { table(:projects) }
  let(:users) { table(:users) }

  let(:user) { users.create!(name: 'John Doe', email: 'test@example.com', projects_limit: 5) }

  let(:namespace_1) { namespaces.create!(name: 'Namespace 1', path: 'namespace-1', traversal_ids: [1]) }
  let(:namespace_2) { namespaces.create!(name: 'Namespace 2', path: 'namespace-2', traversal_ids: [2]) }

  let(:scanner) { vulnerability_scanners.create!(project_id: project_1.id, external_id: 'gitlab', name: 'GitLab') }

  let(:project_1) do
    projects.create!(
      namespace_id: namespace_1.id,
      project_namespace_id: namespace_1.id,
      name: 'Project 1',
      path: 'project-1'
    )
  end

  let(:project_2) do
    projects.create!(
      namespace_id: namespace_2.id,
      project_namespace_id: namespace_2.id,
      name: 'Project 2',
      path: 'project-2'
    )
  end

  let(:identifier) do
    identifiers.create!(
      project_id: project_1.id,
      fingerprint: 'foo',
      external_type: 'cve',
      external_id: '1234',
      name: 'cve-1234'
    )
  end

  let(:finding_1) do
    findings.create!(
      project_id: project_1.id,
      primary_identifier_id: identifier.id,
      scanner_id: scanner.id,
      severity: 5,
      confidence: 5,
      report_type: 1,
      uuid: SecureRandom.uuid,
      project_fingerprint: '',
      location_fingerprint: '',
      metadata_version: '1',
      raw_metadata: '{}',
      name: 'finding 1'
    )
  end

  let(:finding_2) do
    findings.create!(
      project_id: project_1.id,
      primary_identifier_id: identifier.id,
      scanner_id: scanner.id,
      severity: 5,
      confidence: 5,
      report_type: 1,
      uuid: SecureRandom.uuid,
      project_fingerprint: '',
      location_fingerprint: '',
      metadata_version: '1',
      raw_metadata: '{}',
      name: 'finding 2'
    )
  end

  let(:vulnerability_1) do
    vulnerabilities.create!(
      project_id: project_1.id,
      finding_id: finding_1.id,
      author_id: user.id,
      title: "Vulnerability 1",
      severity: 5,
      confidence: 5,
      report_type: 1
    )
  end

  let(:vulnerability_2) do
    vulnerabilities.create!(
      project_id: project_2.id,
      finding_id: finding_2.id,
      author_id: user.id,
      title: "Vulnerability 2",
      severity: 5,
      confidence: 5,
      report_type: 1
    )
  end

  let!(:vulnerability_read_1) do
    vulnerability_reads.create!(
      project_id: project_1.id,
      namespace_id: namespace_1.id,
      vulnerability_id: vulnerability_1.id,
      scanner_id: scanner.id,
      uuid: SecureRandom.uuid,
      severity: 5,
      report_type: 1,
      state: 1,
      traversal_ids: [0]
    )
  end

  let!(:vulnerability_read_2) do
    vulnerability_reads.create!(
      project_id: project_2.id,
      namespace_id: namespace_2.id,
      vulnerability_id: vulnerability_2.id,
      scanner_id: scanner.id,
      uuid: SecureRandom.uuid,
      severity: 5,
      report_type: 1,
      state: 1
    )
  end

  describe "#up" do
    before do
      allow(Gitlab).to receive(:com?).and_return(gitlab_com?)
    end

    context 'when is not running for GitLab.com' do
      let(:gitlab_com?) { false }

      it 'does not update the records' do
        expect { migrate! }.not_to change { vulnerability_read_2.reload.traversal_ids }
      end
    end

    context 'when running for GitLab.com' do
      let(:gitlab_com?) { true }

      it 'sets the traversal_ids for missing records' do
        expect { migrate! }.to change { vulnerability_read_2.reload.traversal_ids }.to(namespace_2.traversal_ids)
          .and not_change { vulnerability_read_1.reload.traversal_ids }
      end
    end
  end
end

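For readers new to migration specs: the `table(:vulnerability_reads)` helper used throughout this file builds a throwaway ActiveRecord model bound directly to the raw table, so the spec never depends on application models that may drift after the migration ships. Conceptually it behaves roughly like this sketch (simplified; not the actual helper):

def table(name)
  Class.new(ActiveRecord::Base) do
    self.table_name = name.to_s
    self.inheritance_column = :_type_disabled # treat any `type` column as plain data, not STI
  end
end

vulnerability_reads = table(:vulnerability_reads)
# vulnerability_reads.create!(project_id: ..., namespace_id: ...) is then used exactly like the lets above
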
@@ -232,8 +232,7 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
        create(:ci_build_need, build: test_job, name: 'my test job')
      end

      it 'reports the build needs and execution requirements',
        quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/448867' do
      it 'reports the build needs and execution requirements' do
        post_graphql(query, current_user: user)

        expect(jobs_graphql_data).to contain_exactly(

@@ -350,6 +350,18 @@ RSpec.describe 'GraphQL', feature_category: :shared do

      expect(graphql_data['currentUser']).to be_nil
    end

    context 'when graphql_minimal_auth_methods FF is disabled' do
      before do
        stub_feature_flags(graphql_minimal_auth_methods: false)
      end

      it 'authenticates users with an LFS token' do
        post '/api/graphql.git', params: { query: query }, headers: headers

        expect(graphql_data['currentUser']['username']).to eq(user.username)
      end
    end
  end

  describe 'with job token' do

@@ -367,6 +379,18 @@ RSpec.describe 'GraphQL', feature_category: :shared do

      expect_graphql_errors_to_include(/Invalid token/)
    end

    context 'when graphql_minimal_auth_methods FF is disabled' do
      before do
        stub_feature_flags(graphql_minimal_auth_methods: false)
      end

      it 'authenticates as the user' do
        post '/api/graphql', params: { query: query, job_token: job_token }

        expect(graphql_data['currentUser']['username']).to eq(user.username)
      end
    end
  end

  describe 'with static object token' do

@@ -385,6 +409,25 @@ RSpec.describe 'GraphQL', feature_category: :shared do

      expect_graphql_errors_to_include(/Invalid token/)
    end

    # context is included to demonstrate that the FF code is not changing this behavior
    context 'when graphql_minimal_auth_methods FF is disabled' do
      before do
        stub_feature_flags(graphql_minimal_auth_methods: false)
      end

      it 'does not authenticate user from header' do
        post '/api/graphql', params: { query: query }, headers: headers

        expect(graphql_data['currentUser']).to be_nil
      end

      it 'does not authenticate user from parameter' do
        post "/api/graphql?token=#{user.static_object_token}", params: { query: query }

        expect_graphql_errors_to_include(/Invalid token/)
      end
    end
  end

  describe 'with dependency proxy token' do

@@ -405,6 +448,25 @@ RSpec.describe 'GraphQL', feature_category: :shared do

      expect_graphql_errors_to_include(/Invalid token/)
    end

    # context is included to demonstrate that the FF code is not changing this behavior
    context 'when graphql_minimal_auth_methods FF is disabled' do
      before do
        stub_feature_flags(graphql_minimal_auth_methods: false)
      end

      it 'does not authenticate user from dependency proxy token in headers' do
        post '/api/graphql', params: { query: query }, headers: headers

        expect_graphql_errors_to_include(/Invalid token/)
      end

      it 'does not authenticate user from dependency proxy token in parameter' do
        post "/api/graphql?access_token=#{token}", params: { query: query }

        expect_graphql_errors_to_include(/Invalid token/)
      end
    end
  end
end

@@ -71,12 +71,6 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :ci_config_feature_flag_co
      end

      context 'when sidekiq processes the job', :sidekiq_inline do
        before do
          allow_next_instance_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService) do |resource_service|
            allow(resource_service).to receive(:respawn_assign_resource_worker)
          end
        end

        it 'transitions to pending status and triggers a downstream pipeline' do
          pipeline = create_pipeline!

@@ -127,12 +127,6 @@ RSpec.describe Ci::CreatePipelineService, '#execute', :ci_config_feature_flag_co
      end

      context 'when sidekiq processes the job', :sidekiq_inline do
        before do
          allow_next_instance_of(Ci::ResourceGroups::AssignResourceFromResourceGroupService) do |resource_service|
            allow(resource_service).to receive(:respawn_assign_resource_worker)
          end
        end

        it 'transitions to pending status and triggers a downstream pipeline' do
          pipeline = create_pipeline!

@@ -211,8 +211,8 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService, featu
        expect(ci_build.reload).to be_waiting_for_resource
      end

      it 're-spawns the new worker for assigning a resource' do
        expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorkerV2).to receive(:perform_in).with(1.minute, resource_group.id)
      it 'does not re-spawn the new worker for assigning a resource' do
        expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorkerV2).not_to receive(:perform_in)

        subject
      end

@@ -222,87 +222,13 @@ RSpec.describe Ci::ResourceGroups::AssignResourceFromResourceGroupService, featu
          stub_feature_flags(assign_resource_worker_deduplicate_until_executing: false)
        end

        it 're-spawns the old worker for assigning a resource' do
          expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).to receive(:perform_in).with(1.minute, resource_group.id)
        it 'does not re-spawn the old worker for assigning a resource' do
          expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).not_to receive(:perform_in)

          subject
        end
      end

      context 'when there are no upcoming processables' do
        before do
          ci_build.update!(status: :success)
        end

        it 'does not re-spawn the new worker for assigning a resource' do
          expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorkerV2).not_to receive(:perform_in)

          subject
        end

        context 'when `assign_resource_worker_deduplicate_until_executing` FF is disabled' do
          before do
            stub_feature_flags(assign_resource_worker_deduplicate_until_executing: false)
          end

          it 'does not re-spawn the old worker for assigning a resource' do
            expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).not_to receive(:perform_in)

            subject
          end
        end
      end

      context 'when there are no waiting processables and process_mode is ordered' do
        let(:resource_group) { create(:ci_resource_group, process_mode: :oldest_first, project: project) }

        before do
          ci_build.update!(status: :created)
        end

        it 'does not re-spawn the new worker for assigning a resource' do
          expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorkerV2).not_to receive(:perform_in)

          subject
        end

        context 'when `assign_resource_worker_deduplicate_until_executing` FF is disabled' do
          before do
            stub_feature_flags(assign_resource_worker_deduplicate_until_executing: false)
          end

          it 'does not re-spawn the old worker for assigning a resource' do
            expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).not_to receive(:perform_in)

            subject
          end
        end
      end

      context 'when :respawn_assign_resource_worker FF is disabled' do
        before do
          stub_feature_flags(respawn_assign_resource_worker: false)
        end

        it 'does not re-spawn the new worker for assigning a resource' do
          expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorkerV2).not_to receive(:perform_in)

          subject
        end

        context 'when `assign_resource_worker_deduplicate_until_executing` FF is disabled' do
          before do
            stub_feature_flags(assign_resource_worker_deduplicate_until_executing: false)
          end

          it 'does not re-spawn the old worker for assigning a resource' do
            expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorker).not_to receive(:perform_in)

            subject
          end
        end
      end

      context 'when there is a stale build assigned to a resource' do
        before do
          other_build.doom!

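One detail worth keeping in mind with these examples: a negative message expectation such as `not_to receive(:perform_in)` only watches calls made after it is declared, which is why `subject` is invoked afterwards in every case. A minimal sketch of the pattern (the worker constant is the real one from this spec; the example wrapper is illustrative):

it 'does not re-spawn the worker' do
  # Declare the expectation first, then exercise the service.
  expect(Ci::ResourceGroups::AssignResourceFromResourceGroupWorkerV2)
    .not_to receive(:perform_in)

  subject # any perform_in call from here on would fail the example
end
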
@@ -35,18 +35,6 @@ RSpec.describe Packages::TerraformModule::CreatePackageService, feature_category

    context 'valid package' do
      it_behaves_like 'creating a package'

      context 'when index_terraform_module_archive feature flag is disabled' do
        before do
          stub_feature_flags(index_terraform_module_archive: false)
        end

        it 'does not enqueue the ProcessPackageFileWorker' do
          expect(::Packages::TerraformModule::ProcessPackageFileWorker).not_to receive(:perform_async)

          subject
        end
      end
    end

    context 'package already exists elsewhere' do

@@ -109,7 +109,7 @@ RSpec::Matchers.define :have_graphql_arguments do |*expected|
  def expected_names(field)
    @names ||= Array.wrap(expected).map { |name| GraphqlHelpers.fieldnamerize(name) }

    if field.type.try(:ancestors)&.include?(GraphQL::Types::Relay::BaseConnection)
    if field.try(:type).try(:ancestors)&.include?(GraphQL::Types::Relay::BaseConnection)
      @names | %w[after before first last]
    else
      @names

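The switch from `field.type.try(:ancestors)` to `field.try(:type).try(:ancestors)` guards the first call too: with ActiveSupport's `try`, a receiver that does not respond to the message returns `nil` instead of raising, so the matcher also tolerates objects that have no `type` at all. A tiny sketch of the difference (`FakeField` is made up for illustration):

require 'active_support/core_ext/object/try'

FakeField = Struct.new(:name) # deliberately defines no #type

field = FakeField.new('thing')

field.try(:type).try(:ancestors) # => nil, no exception
field.type.try(:ancestors)       # raises NoMethodError - the failure the guarded version avoids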