Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-06-20 00:16:12 +00:00
parent 3bf9a6863b
commit 0f437af068
28 changed files with 225 additions and 43 deletions

View File

@ -2,17 +2,6 @@
# Cop supports --autocorrect.
Layout/SpaceInLambdaLiteral:
Exclude:
- 'ee/app/serializers/blocking_merge_request_entity.rb'
- 'ee/app/serializers/clusters/environment_entity.rb'
- 'ee/app/serializers/dashboard_operations_project_entity.rb'
- 'ee/app/serializers/ee/blob_entity.rb'
- 'ee/app/serializers/ee/environment_entity.rb'
- 'ee/app/serializers/ee/evidences/release_entity.rb'
- 'ee/app/serializers/ee/issue_entity.rb'
- 'ee/app/serializers/ee/issue_sidebar_basic_entity.rb'
- 'ee/app/serializers/ee/issue_sidebar_extras_entity.rb'
- 'ee/app/serializers/ee/merge_request_poll_cached_widget_entity.rb'
- 'ee/app/serializers/ee/merge_request_widget_entity.rb'
- 'ee/app/serializers/ee/note_entity.rb'
- 'ee/app/serializers/epic_base_entity.rb'
- 'ee/app/serializers/epic_entity.rb'

View File

@ -208,7 +208,7 @@ gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentati
gem 'elasticsearch-api', '7.13.3' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'aws-sdk-core', '~> 3.197.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'aws-sdk-cloudformation', '~> 1' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'aws-sdk-s3', '~> 1.151.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'aws-sdk-s3', '~> 1.152.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'faraday_middleware-aws-sigv4', '~>0.3.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'typhoeus', '~> 1.4.0' # Used with Elasticsearch to support http keep-alive connections # rubocop:todo Gemfile/MissingFeatureCategory

View File

@ -38,7 +38,7 @@
{"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
{"name":"aws-sdk-core","version":"3.197.0","platform":"ruby","checksum":"34c44883d3cc91ada382f6ecab981a1b7ede9a1ec47cf8eb2eaa3ee46035db90"},
{"name":"aws-sdk-kms","version":"1.76.0","platform":"ruby","checksum":"e7f75013cba9ba357144f66bbc600631c192e2cda9dd572794be239654e2cf49"},
{"name":"aws-sdk-s3","version":"1.151.0","platform":"ruby","checksum":"9e40e64f3ea112b33fdbb0416b6b44247372b983f6a7a9c30fa9b5627a4f7008"},
{"name":"aws-sdk-s3","version":"1.152.0","platform":"ruby","checksum":"f502f292b691ea45db0b4ac8f04ff54ed5625d647340f93c7e1b4a91ea08d720"},
{"name":"aws-sigv4","version":"1.8.0","platform":"ruby","checksum":"84dd99768b91b93b63d1d8e53ee837cfd06ab402812772a7899a78f9f9117cbc"},
{"name":"axe-core-api","version":"4.8.0","platform":"ruby","checksum":"88cf44fdbd5d501ae429f9ca6b37c4a46ba27ac673d478ab688eea3e353da62f"},
{"name":"axe-core-rspec","version":"4.9.0","platform":"ruby","checksum":"e5f81fa55af0c421254c98476511c4511e193c5659996f184541f74a1359df3a"},

View File

@ -312,8 +312,8 @@ GEM
aws-sdk-kms (1.76.0)
aws-sdk-core (~> 3, >= 3.188.0)
aws-sigv4 (~> 1.1)
aws-sdk-s3 (1.151.0)
aws-sdk-core (~> 3, >= 3.194.0)
aws-sdk-s3 (1.152.0)
aws-sdk-core (~> 3, >= 3.197.0)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.8)
aws-sigv4 (1.8.0)
@ -1926,7 +1926,7 @@ DEPENDENCIES
awesome_print
aws-sdk-cloudformation (~> 1)
aws-sdk-core (~> 3.197.0)
aws-sdk-s3 (~> 1.151.0)
aws-sdk-s3 (~> 1.152.0)
axe-core-rspec (~> 4.9.0)
babosa (~> 2.0)
base32 (~> 0.3.0)

View File

@ -177,7 +177,7 @@ export default {
<template>
<div>
<div class="flash-container js-suggestions-flash"></div>
<div class="flash-container js-suggestions-flash gl-white-space-pre-line"></div>
<div
v-show="isRendered"
ref="container"

View File

@ -15,6 +15,7 @@ module Organizations
has_many :namespaces
has_many :groups
has_many :projects
has_many :snippets
has_one :settings, class_name: "OrganizationSetting"
has_one :organization_detail, inverse_of: :organization, autosave: true

View File

@ -47,6 +47,7 @@ class Snippet < ApplicationRecord
belongs_to :author, class_name: 'User'
belongs_to :project
belongs_to :organization, class_name: 'Organizations::Organization'
alias_method :resource_parent, :project
has_many :notes, as: :noteable, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent

View File

@ -389,6 +389,10 @@ security_scans:
- table: p_ci_builds
column: build_id
on_delete: async_delete
snippets:
- table: organizations
column: organization_id
on_delete: async_nullify
terraform_state_versions:
- table: ci_builds
column: ci_build_id
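The new `snippets` entry above tells the loose-foreign-keys machinery that when an `organizations` row is deleted, `snippets.organization_id` is cleared later by a background worker instead of blocking the delete. A rough, self-contained sketch of what that nullify step amounts to (the deleted IDs and the raw SQL are illustrative, not GitLab's actual worker):

```ruby
# Illustration only: the effect of `on_delete: async_nullify` for snippets.
# GitLab's real cleanup runs in a background worker over recorded deletions.
deleted_organization_ids = [42, 43] # hypothetical organization IDs already deleted

nullify_sql = <<~SQL
  UPDATE snippets
  SET organization_id = NULL
  WHERE organization_id IN (#{deleted_organization_ids.join(', ')})
SQL

puts nullify_sql
```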

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true

class AddOrganizationIdToSnippets < Gitlab::Database::Migration[2.2]
  DEFAULT_ORGANIZATION_ID = 1

  milestone '17.2'
  enable_lock_retries!

  def change
    add_column :snippets, :organization_id, :bigint, default: DEFAULT_ORGANIZATION_ID, null: true
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

class AddIndexForOrganizationIdOnSnippets < Gitlab::Database::Migration[2.2]
  milestone '17.2'
  disable_ddl_transaction!

  TABLE_NAME = :snippets
  INDEX_NAME = 'index_snippets_on_organization_id'

  def up
    add_concurrent_index TABLE_NAME, :organization_id, name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name TABLE_NAME, INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
73631774f68f3d4012b413d549458316c70e5828b83b387d77329249fa661514

View File

@ -0,0 +1 @@
5eaea04d560ff1d1a5c604aba0f847edfc03cafb0b8b9103677e1061608109a8

View File

@ -17296,7 +17296,8 @@ CREATE TABLE snippets (
secret boolean DEFAULT false NOT NULL,
repository_read_only boolean DEFAULT false NOT NULL,
imported smallint DEFAULT 0 NOT NULL,
imported_from smallint DEFAULT 0 NOT NULL
imported_from smallint DEFAULT 0 NOT NULL,
organization_id bigint DEFAULT 1
);
CREATE SEQUENCE snippets_id_seq
@ -28618,6 +28619,8 @@ CREATE INDEX index_snippets_on_id_and_created_at ON snippets USING btree (id, cr
CREATE INDEX index_snippets_on_id_and_type ON snippets USING btree (id, type);
CREATE INDEX index_snippets_on_organization_id ON snippets USING btree (organization_id);
CREATE INDEX index_snippets_on_project_id_and_title ON snippets USING btree (project_id, title);
CREATE INDEX index_snippets_on_project_id_and_visibility_level ON snippets USING btree (project_id, visibility_level);

View File

@ -37,6 +37,13 @@ All deprecations and changes between versions are in the documentation.
Only API version v4 is available.
### Breaking change exemptions
Elements labeled as [experimental or beta](../../policy/experiment-beta-support.md) in the [REST API resources](../api_resources.md) are exempt from the deprecation process.
These parts can be removed or changed at any time without notice.
Fields behind a feature flag and disabled by default do not follow the deprecation and removal process. These fields can be removed at any time without notice.
## How to use the API
API requests must include both `api` and the API version. The API

View File

@ -24,25 +24,24 @@ the AI Gateway.
In order to address this, the Duo Workflow functionality will consist of 2
separate components:
1. The Duo Workflow Service which is a Python based service we run in our
infrastructure. This is built on top of
1. The Duo Workflow Service, which is a Python service we run in our
infrastructure. The Workflow Service is built on top of
[LangGraph](https://github.com/langchain-ai/langgraph).
1. The Duo Workflow Executor which is a Go binary that communicates via long
running gRPC connection to Duo Workflow Service and executes the arbitrary
commands. It will be possible for users to run this locally or in CI
pipelines
1. The Duo Workflow Executor, which is a Go binary that communicates via a
long-running gRPC connection to the Duo Workflow Service and executes arbitrary
commands. It will be possible for users to run this locally or in CI pipelines.
In our first release, we will support 2 execution modes:
1. Local Executor: will run commands and edit files locally in a
sandboxed Docker container on the developer's machine. Developers will be able to
see the files being edited live, and the workflow will be interactive.
1. CI Executor: For all non-local usecases of Duo Workflow (e.g. issue/epic based workflows)
these will be triggered by the GitLab UI and will create a CI Pipeline to
run the Duo Workflow Executor
1. CI Executor: All non-local use cases of Duo Workflow (for example,
issue/epic-based workflows) will be triggered by the GitLab UI and will
create a CI Pipeline to run the Duo Workflow Executor.
Our architecture will also support mixed deployments for self-managed such that
some features of Duo Workflow will be available using a cloud hosted AI
some features of Duo Workflow will be available using a cloud-hosted AI
Gateway.
### Detailed plan
@ -55,12 +54,16 @@ run in multiple runtimes:
points in the GitLab application but there should be a central workflow UI
with reusable components (e.g. Vue components) that could be embedded into
our editor extensions
1. The Duo Workflow Service. This will be a Python based service we deploy with
a gRPC API. The only interface to this will be the gRPC interface which is
called from the Duo Workflow Executor. Internally this will use LangGraph to
execute the workflows. It will not have any persisted state but the state of
1. The Duo Workflow Service. This is a Python-based service we deploy with
a gRPC API. The only interface to this is the gRPC interface, which is
called from the Duo Workflow Executor. Internally, this will use LangGraph to
execute the workflows. For reasons why LangGraph was chosen, see [this work item](https://gitlab.com/gitlab-org/gitlab/-/work_items/457958).
The Workflow Service will not have any persisted state but the state of
running workflows will be kept in memory and periodically checkpointed in
GitLab.
GitLab. The Workflow Service is built into the existing
[AI Gateway codebase](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist)
but will have its own deployment. This deployment will take advantage of
Runway's [multiple deployments from one service repository](https://gitlab.com/gitlab-com/gl-infra/platform/runway/docs/-/blob/master/src/content/docs/guides/onboarding.md) feature.
1. The Duo Workflow Executor. This will be written in Go for easy installation
in development containers. This component will run in CI jobs or on a user's
local workstation. There it will run sandboxed in a
@ -72,15 +75,15 @@ run in multiple runtimes:
The following are important constraints of the architecture:
1. All state management for workflows will be inside GitLab.
1. Duo Workflow Service is expected to periodically checkpoint it's state in GitLab
1. Duo Workflow Service is expected to periodically checkpoint its state in GitLab
1. Duo Workflow Service in-memory state can be dropped/lost at any time so
checkpointing will be the only guaranteed point that can be returned to
1. If a local Duo Workflow Executor drops connection then the Duo Workflow
1. If a local Duo Workflow Executor drops the connection, the Duo Workflow
Service will checkpoint its state and shut down as soon as it reaches
a point where it is waiting on the executor
1. In order to avoid multiple Duo Workflow Service instances running on the
same workflow the Duo Workflow Service will always acquire a lock with
GitLab before it starts running. When it suspends it will release the lock and
same workflow, the Duo Workflow Service will always acquire a lock with
GitLab before it starts running. When it suspends, it will release the lock and
similarly, there will be a timeout state if it has not checkpointed in the
last 60 seconds. GitLab will not accept checkpoints from a timed-out run of
the Duo Workflow Service.
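A minimal sketch of that lock-and-timeout rule, assuming GitLab tracks the time of the last accepted checkpoint per workflow (the class and method names below are hypothetical, not the planned implementation):

```ruby
# Illustration only: reject checkpoints from a Duo Workflow Service run whose
# lease has lapsed (no checkpoint accepted in the last 60 seconds).
class WorkflowLease
  TIMEOUT_SECONDS = 60

  def initialize
    @last_checkpoint_at = nil
    @holder = nil
  end

  # Acquire the lock before running; only one service instance may hold it.
  def acquire(holder, now: Time.now)
    return false if @holder && @holder != holder && !timed_out?(now)

    @holder = holder
    @last_checkpoint_at = now
    true
  end

  # Accept a checkpoint only from the current, non-timed-out holder.
  def checkpoint!(holder, now: Time.now)
    return false unless @holder == holder && !timed_out?(now)

    @last_checkpoint_at = now
    true
  end

  private

  def timed_out?(now)
    @last_checkpoint_at.nil? || (now - @last_checkpoint_at) > TIMEOUT_SECONDS
  end
end

lease = WorkflowLease.new
lease.acquire('service-instance-a')
lease.checkpoint!('service-instance-a')                      # => true
lease.checkpoint!('service-instance-a', now: Time.now + 120) # => false, timed out
```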
@ -269,6 +272,103 @@ the workflow.
Consideration should also be made to clean up Git refs over time after some
workflow expiration period.
### Authentication
Duo Workflow requires several authentication flows.
In this section, each connection that requires authentication is listed and the
authentication mechanism is discussed.
#### Local Duo Workflow Executor -> Duo Workflow Service (AI Gateway)
When a Duo Workflow starts, the Duo Workflow Executor must connect to the AI Gateway.
To authenticate this connection:
1. The IDE will use the OAuth token or Personal Access Token (PAT) that the user
generated while setting up the GitLab editor extension.
1. The IDE uses that token to authenticate a request to a GitLab Rails API
endpoint to obtain a short-lived user- and system-scoped JWT.
1. When the GitLab Rails instance receives this request, it loads its
instance-scoped JWT (synced daily from CustomersDot) and contacts the AI
Gateway to swap this instance token for the above-mentioned user-scoped token
(also cryptographically signed).
1. GitLab Rails returns this JWT to the IDE.
1. The IDE passes on this JWT to the local Duo Workflow Executor component.
1. The Duo Workflow Executor uses this JWT to authenticate the Duo Workflow
Service gRPC connection.
This flow mimics the
[token flow that allows IDEs to connect directly to the AI Gateway](https://gitlab.com/groups/gitlab-org/-/epics/13252).
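For illustration, the short-lived, user-scoped JWT returned in step 4 could look roughly like the sketch below, built with the `jwt` gem. The claim names, signing-key handling, and one-hour lifetime are assumptions; in the real flow the token comes out of the AI Gateway/CustomersDot exchange described above.

```ruby
require 'jwt'
require 'openssl'

# Illustration only: mint and verify a short-lived, user-scoped token like the
# one GitLab Rails hands back to the IDE.
signing_key = OpenSSL::PKey::RSA.generate(2048) # stand-in for the real signing key

claims = {
  sub: 'user:1234',                 # hypothetical user-scoped subject
  aud: 'duo-workflow-service',      # hypothetical audience
  scopes: ['duo_workflow_execute'], # hypothetical scope name
  exp: Time.now.to_i + 3600         # short-lived: one hour
}

token = JWT.encode(claims, signing_key, 'RS256')

# The Duo Workflow Executor presents this token on its gRPC connection; the
# service verifies it against the corresponding public key.
payload, _header = JWT.decode(token, signing_key.public_key, true, algorithm: 'RS256')
puts payload['sub']
```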
#### CI Duo Workflow Executor -> Duo Workflow Service (AI Gateway)
When a Duo Workflow is executed by a CI Runner, the Duo Workflow Executor must
connect to the AI Gateway.
A CI Pipeline is created by GitLab, so there is no need to query a GitLab Rails
API endpoint to obtain a short-lived user- and system-scoped JWT. Instead, in
the process of creating the CI pipeline, GitLab Rails will:
1. Generate the user-scoped JWT.
1. Inject the JWT as an environment variable (for example: `DUO_WORKFLOW_TOKEN`)
in the CI pipeline.
1. The Duo Workflow Executor running inside the CI job uses this environment
variable value to authenticate the Duo Workflow Service gRPC connection.
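On the executor side, that handshake reduces to reading the injected variable and presenting it as call metadata. The real executor is written in Go; the Ruby sketch below only illustrates the shape, and the header name and stub call are assumptions.

```ruby
# Illustration only: pick up the CI-injected token and attach it to gRPC calls.
token = ENV.fetch('DUO_WORKFLOW_TOKEN', 'example-token') # injected at pipeline creation

grpc_metadata = { 'authorization' => "Bearer #{token}" }

# With a generated gRPC client stub this would be passed per call, for example:
#   stub.execute_workflow(request, metadata: grpc_metadata)
puts grpc_metadata['authorization']
```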
#### Duo Workflow Service (AI Gateway) -> GitLab Rails API
Reasons that the AI Gateway must be able to authenticate requests to the GitLab Rails API:
1. The Duo Workflow Service will need to periodically make requests to GitLab Rails
to sync workflow state. This means that the AI Gateway must be able to
authenticate these requests.
1. Duo Workflow may need to make other GitLab Rails API queries to gather
context. For example, a Duo Workflow for "solve issue with code" would
require an API request to retrieve the issue content.
1. The end state of a Duo Workflow may take the form of a generated artifact
(for example, a Git commit or merge request) on the GitLab platform. To
generate this artifact, the AI Gateway must be able to make API requests to
GitLab Rails.
Requirements for the token used to authenticate requests from the AI Gateway to
the GitLab Rails API:
1. Any artifacts created by a Duo Workflow must be auditable in order
to maintain transparency about AI-generated activities on the GitLab platform.
1. The token's access level must match the access level of the user who
initiated the Workflow to ensure that there is no privilege escalation.
1. We must have the ability to block read/write for all resources that belong to
instances/projects/groups with `duo_features_enabled` set to false.
1. The token must be valid for as long as it takes an agent to execute, or be
refreshable by the AI Gateway. Workflow execution may take several hours.
The JWT that the Workflow Executor uses to authenticate to the AI Gateway could
potentially be adapted to also work for this use-case but has some problems:
1. Need to update GitLab Rails to accept this type of token for API authentication.
1. JWTs are not revocable; what if we need to cut off an agent's access?
1. Need to build token rotation. How would the AI Gateway authenticate an API
request to generate a new token if the old JWT is already expired?
For these reasons, OAuth is a better protocol for this use-case. OAuth tokens:
1. Are only valid for 2 hours.
1. Can be revoked.
1. Have a built-in refresh flow.
1. Are an established authentication pattern for federating access between
services.
To use OAuth, we will:
1. Create a new token scope called `ai_workflows` ([related issue](https://gitlab.com/gitlab-org/gitlab/-/issues/467160))
1. Create a new API endpoint in GitLab Rails that accepts the JWT.
1. That endpoint will generate an OAuth token with the `ai_workflows` scope.
1. The AI Gateway will exchange the user-scoped JWT for a user-scoped
`ai_workflows` OAuth token.
1. Use the OAuth token for any GitLab Rails API requests to read or write data
for a workflow.
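Since GitLab Rails already issues OAuth tokens through Doorkeeper, the exchange endpoint could mint the scoped token roughly as below. The method name and the dedicated OAuth application are hypothetical; only the `ai_workflows` scope and the two-hour validity come from the plan above, and revocation and refresh are handled by Doorkeeper's standard flows.

```ruby
# Illustration only (would live inside GitLab Rails): exchange a verified
# user-scoped JWT for a revocable, short-lived `ai_workflows` OAuth token.
def exchange_workflow_jwt_for_oauth_token(user, duo_workflow_oauth_app)
  Doorkeeper::AccessToken.create!(
    application_id: duo_workflow_oauth_app.id, # hypothetical "Duo Workflow" OAuth app
    resource_owner_id: user.id,                # token acts as the initiating user
    scopes: 'ai_workflows',                    # new scope from the related issue
    expires_in: 2.hours.to_i                   # matches the 2-hour validity above
  )
end
```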
### Options we've considered and pros/cons
#### Delegate only unsafe execution to local/CI pipelines

View File

@ -81,6 +81,7 @@ The exception is only when:
- A feature must be removed in a major GitLab release.
- Backward compatibility cannot be maintained
[in any form](#accommodating-backward-compatibility-instead-of-breaking-changes).
- The feature was previously [marked as experimental or beta](#experimental-beta-and-generally-available-features).
This exception should be rare.
@ -111,6 +112,36 @@ Some examples of non-breaking changes:
- Changes from a `500` status code to [any supported status code](../api/rest/index.md#status-codes) (this is a bugfix).
- Changes to the order of fields returned in a response.
## Experimental, beta, and generally available features
You can add API elements as [experimental and beta features](../policy/experiment-beta-support.md). They must be additive changes; otherwise, they are categorized as
[a breaking change](#what-is-not-a-breaking-change).
API elements marked as experiment or beta are exempt from the [ensuring backward compatibility](#accommodating-backward-compatibility-instead-of-breaking-changes) policy,
and can be changed or removed at any time without prior notice.
While in the [experiment status](../policy/experiment-beta-support.md#experiment):
- Use a feature flag that is [off by default](feature_flags/index.md#beta-type).
- When the flag is off:
- Any added endpoints must return `404 Not Found`.
- Any added arguments must be ignored.
- Any added fields must not be exposed.
- The [API documentation](../api/api_resources.md) must [document the experimental status](documentation/experiment_beta.md) and the feature flag [must be documented](documentation/feature_flags.md).
- The [OpenAPI documentation](../api/openapi/openapi_interactive.md) should not describe the changes.
While in the [beta status](../policy/experiment-beta-support.md#beta):
- Use a feature flag that is [on by default](feature_flags/index.md#beta-type).
- The [API documentation](../api/api_resources.md) must [document the beta status](documentation/experiment_beta.md) and the feature flag [must be documented](documentation/feature_flags.md).
- The [OpenAPI documentation](../api/openapi/openapi_interactive.md) should not describe the changes.
When the feature becomes [generally available](../policy/experiment-beta-support.md#generally-available-ga):
- [Remove](feature_flags/controls.md#cleaning-up) the feature flag.
- Remove the [experiment or beta status](documentation/experiment_beta.md) from the [API documentation](../api/api_resources.md).
- Add the [OpenAPI documentation](../api/openapi/openapi_interactive.md) to make the changes programmatically discoverable.
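As a self-contained sketch of the experiment-stage behaviour above, the whole endpoint answers `404 Not Found` until its default-off flag is enabled, and the added field is never exposed while the flag is off. The endpoint, payload, and environment-variable flag stand-in are made up; inside GitLab the check would use `Feature.enabled?`.

```ruby
# frozen_string_literal: true
require 'grape'

# Illustration only: an experimental endpoint hidden behind a default-off flag.
class ExperimentalWidgetsApi < Grape::API
  format :json

  helpers do
    # Stand-in for a feature-flag check such as GitLab's Feature.enabled?.
    def experimental_widgets_enabled?
      ENV['EXPERIMENTAL_WIDGETS_API'] == 'enabled'
    end
  end

  get 'widgets/:id/score' do
    # While the flag is off, the endpoint must behave as if it does not exist.
    error!({ message: '404 Not Found' }, 404) unless experimental_widgets_enabled?

    { id: params[:id], score: 42 } # new fields are only exposed when the flag is on
  end
end
```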
## Declared parameters
Grape allows you to access only the parameters that have been declared by your

View File

@ -93,7 +93,7 @@ module Gitlab
'Secret push protection is triggered when commits are pushed to a repository. ' \
'If any secrets are detected, the push is blocked.'),
help_path: Gitlab::Routing.url_helpers.help_page_path(
'user/application_security/secret_detection/pre_receive/index'),
'user/application_security/secret_detection/secret_push_protection/index'),
type: 'pre_receive_secret_detection'
},
secret_detection: {

View File

@ -19,7 +19,8 @@ RSpec.describe 'Database schema', feature_category: :database do
users: [%w[accepted_term_id]],
ci_builds: [%w[partition_id stage_id], %w[partition_id execution_config_id]], # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142804#note_1745483081
p_ci_builds: [%w[partition_id stage_id], %w[partition_id execution_config_id]], # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142804#note_1745483081
ai_testing_terms_acceptances: %w[user_id] # testing terms only have 1 entry, and if the user is deleted the record should remain
ai_testing_terms_acceptances: %w[user_id], # testing terms only have 1 entry, and if the user is deleted the record should remain
snippets: %w[organization_id] # this index is added in an async manner, hence it needs to be ignored in the first phase.
}.with_indifferent_access.freeze
TABLE_PARTITIONS = %w[ci_builds_metadata].freeze

View File

@ -202,7 +202,7 @@ export const preReceiveSecretDetectionMock = {
'If any secrets are detected, the push is blocked.`,
helpPath: SAST_HELP_PATH,
configurationHelpPath: helpPagePath(
'user/application_security/secret_detection/pre_receive/index',
'user/application_security/secret_detection/secret_push_protection/index',
),
type: PRE_RECEIVE_SECRET_DETECTION,
available: true,

View File

@ -165,6 +165,7 @@ snippets:
- snippet_repository
- statistics
- repository_storage_moves
- organization
releases:
- author
- project

View File

@ -142,7 +142,7 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
"Secret push protection is triggered when commits are pushed to a repository. " \
"If any secrets are detected, the push is blocked.",
help_path: Gitlab::Routing.url_helpers.help_page_path(
"user/application_security/secret_detection/pre_receive/index"),
"user/application_security/secret_detection/secret_push_protection/index"),
type: "pre_receive_secret_detection" }
:secret_detection | { name: "Pipeline Secret Detection",
description: "Analyze your source code and Git history for secrets by using CI/CD pipelines.",

View File

@ -14,6 +14,7 @@ RSpec.describe Organizations::Organization, type: :model, feature_category: :cel
it { is_expected.to have_many(:users).through(:organization_users).inverse_of(:organizations) }
it { is_expected.to have_many(:organization_users).inverse_of(:organization) }
it { is_expected.to have_many :projects }
it { is_expected.to have_many :snippets }
end
describe 'validations' do

View File

@ -16,6 +16,7 @@ RSpec.describe Snippet, feature_category: :source_code_management do
end
describe 'associations' do
it { is_expected.to belong_to(:organization) }
it { is_expected.to belong_to(:author).class_name('User') }
it { is_expected.to belong_to(:project) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
@ -515,6 +516,15 @@ RSpec.describe Snippet, feature_category: :source_code_management do
it { is_expected.to match_array(snippet) }
end
describe 'with loose foreign keys' do
context 'on organization_id' do
it_behaves_like 'cleanup by a loose foreign key' do
let_it_be(:parent) { create(:organization) }
let_it_be(:model) { create(:snippet, organization: parent) }
end
end
end
describe '#participants' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:snippet) { create(:snippet, content: 'foo', project: project) }

View File

@ -4,7 +4,7 @@ RSpec.shared_examples 'protected ref access' do |association|
include ExternalAuthorizationServiceHelpers
let_it_be(:project) { create(:project) }
let_it_be(:protected_ref) { create(association, project: project) } # rubocop:disable Rails/SaveBang
let_it_be(:protected_ref) { create(association, project: project) } # rubocop:disable Rails/SaveBang -- False positive because factory name is dynamic
describe 'validations' do
subject { build(described_class.model_name.singular) }

View File

@ -4,7 +4,7 @@ RSpec.shared_examples 'protected ref deploy_key access' do
let_it_be(:described_instance) { described_class.model_name.singular }
let_it_be(:protected_ref_name) { described_class.module_parent.model_name.singular }
let_it_be(:project) { create(:project) }
let_it_be(:protected_ref) { create(protected_ref_name, project: project) } # rubocop:disable Rails/SaveBang
let_it_be(:protected_ref) { create(protected_ref_name, project: project) } # rubocop:disable Rails/SaveBang -- False positive because factory name is dynamic
describe 'associations' do
it { is_expected.to belong_to(:deploy_key) }