Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-06-04 09:26:35 +00:00
parent 8ba8b01b4e
commit 0fe671b054
71 changed files with 1248 additions and 708 deletions

View File

@ -530,6 +530,7 @@ lib/gitlab/checks/**
/doc/administration/configure.md @axil
/doc/administration/consul.md @axil
/doc/administration/credentials_inventory.md @jglassman1
/doc/administration/custom_html_header_tags.md @eread
/doc/administration/custom_project_templates.md @msedlakjakubowski
/doc/administration/dedicated/ @lyspin
/doc/administration/diff_limits.md @msedlakjakubowski
@ -597,7 +598,7 @@ lib/gitlab/checks/**
/doc/administration/repository_checks.md @eread
/doc/administration/repository_storage_paths.md @eread
/doc/administration/restart_gitlab.md @axil
/doc/administration/self_hosted_models/ @sselhorn
/doc/administration/self_hosted_models/ @sselhorn @jglassman1
/doc/administration/server_hooks.md @eread
/doc/administration/settings/account_and_limit_settings.md @msedlakjakubowski
/doc/administration/settings/continuous_integration.md @marcel.amirault @lyspin
@ -754,6 +755,7 @@ lib/gitlab/checks/**
/doc/api/project_import_export.md @eread @ashrafkhamis
/doc/api/project_job_token_scopes.md @marcel.amirault
/doc/api/project_level_variables.md @marcel.amirault
/doc/api/project_packages_protection_rules.md @phillipwells
/doc/api/project_relations_export.md @eread @ashrafkhamis
/doc/api/project_repository_storage_moves.md @eread
/doc/api/project_snippets.md @msedlakjakubowski
@ -839,7 +841,6 @@ lib/gitlab/checks/**
/doc/development/audit_event_guide/ @gitlab-org/govern/security-policies-frontend @gitlab-org/govern/threat-insights-frontend-team @gitlab-org/govern/threat-insights-backend-team
/doc/development/avoiding_required_stops.md @gitlab-org/distribution
/doc/development/build_test_package.md @gitlab-org/distribution
/doc/development/cascading_settings.md @gitlab-org/govern/authentication/approvers
/doc/development/cells/ @abdwdd @alexpooley @manojmj
/doc/development/cicd/ @gitlab-org/maintainers/cicd-verify
/doc/development/contributing/verify/ @gitlab-org/maintainers/cicd-verify
@ -925,10 +926,10 @@ lib/gitlab/checks/**
/doc/tutorials/update_commit_messages/ @msedlakjakubowski
/doc/tutorials/website_project_with_analytics/ @lciutacu
/doc/update/ @axil
/doc/user/ai_data_usage.md @sselhorn
/doc/user/ai_experiments.md @sselhorn
/doc/user/ai_features.md @sselhorn
/doc/user/ai_features_enable.md @sselhorn
/doc/user/ai_data_usage.md @sselhorn @jglassman1
/doc/user/ai_experiments.md @sselhorn @jglassman1
/doc/user/ai_features.md @sselhorn @jglassman1
/doc/user/ai_features_enable.md @sselhorn @jglassman1
/doc/user/analytics/ @lciutacu
/doc/user/analytics/ci_cd_analytics.md @phillipwells
/doc/user/application_security/ @rdickenson @phillipwells
@ -944,8 +945,6 @@ lib/gitlab/checks/**
/doc/user/compliance/license_approval_policies.md @rdickenson
/doc/user/compliance/license_list.md @rdickenson
/doc/user/compliance/license_scanning_of_cyclonedx_files/ @rdickenson @phillipwells
/doc/user/custom_roles.md @jglassman1
/doc/user/custom_roles/ @jglassman1
/doc/user/discussions/ @aqualls
/doc/user/emoji_reactions.md @msedlakjakubowski
/doc/user/enterprise_user/ @jglassman1
@ -953,11 +952,11 @@ lib/gitlab/checks/**
/doc/user/get_started/get_started_managing_infrastructure.md @phillipwells
/doc/user/get_started/get_started_planning_work.md @msedlakjakubowski
/doc/user/get_started/get_started_projects.md @lciutacu
/doc/user/gitlab_duo_chat.md @sselhorn
/doc/user/gitlab_duo_chat_enable.md @sselhorn
/doc/user/gitlab_duo_chat_examples.md @sselhorn
/doc/user/gitlab_duo_chat_troubleshooting.md @sselhorn
/doc/user/gitlab_duo_examples.md @sselhorn
/doc/user/gitlab_duo_chat.md @sselhorn @jglassman1
/doc/user/gitlab_duo_chat_enable.md @sselhorn @jglassman1
/doc/user/gitlab_duo_chat_examples.md @sselhorn @jglassman1
/doc/user/gitlab_duo_chat_troubleshooting.md @sselhorn @jglassman1
/doc/user/gitlab_duo_examples.md @sselhorn @jglassman1
/doc/user/group/access_and_permissions.md @lciutacu
/doc/user/group/clusters/ @phillipwells
/doc/user/group/compliance_frameworks.md @eread
@ -992,7 +991,6 @@ lib/gitlab/checks/**
/doc/user/packages/container_registry/ @marcel.amirault
/doc/user/packages/dependency_proxy/ @marcel.amirault
/doc/user/packages/harbor_container_registry/ @marcel.amirault
/doc/user/permissions.md @jglassman1
/doc/user/product_analytics/ @lciutacu
/doc/user/profile/account/ @jglassman1
/doc/user/profile/achievements.md @lciutacu
@ -1028,17 +1026,17 @@ lib/gitlab/checks/**
/doc/user/project/merge_requests/cherry_pick_changes.md @msedlakjakubowski
/doc/user/project/merge_requests/csv_export.md @eread
/doc/user/project/merge_requests/methods/ @msedlakjakubowski
/doc/user/project/merge_requests/reviews/data_usage.md @sselhorn
/doc/user/project/merge_requests/reviews/data_usage.md @sselhorn @jglassman1
/doc/user/project/merge_requests/squash_and_merge.md @msedlakjakubowski
/doc/user/project/merge_requests/status_checks.md @rdickenson
/doc/user/project/ml/experiment_tracking/ @sselhorn
/doc/user/project/ml/model_registry/ @sselhorn
/doc/user/project/ml/experiment_tracking/ @sselhorn @jglassman1
/doc/user/project/ml/model_registry/ @sselhorn @jglassman1
/doc/user/project/organize_work_with_projects.md @lciutacu
/doc/user/project/project_topics.md @lciutacu
/doc/user/project/releases/ @phillipwells
/doc/user/project/releases/release_evidence.md @eread
/doc/user/project/remote_development/ @ashrafkhamis
/doc/user/project/repository/code_explain.md @sselhorn
/doc/user/project/repository/code_explain.md @sselhorn @jglassman1
/doc/user/project/repository/code_suggestions/ @jglassman1
/doc/user/project/repository/file_finder.md @ashrafkhamis
/doc/user/project/repository/monorepos/ @eread

View File

@ -16,3 +16,4 @@ variables:
QA_OMNIBUS_MR_TESTS: "only-smoke"
# Retry failed specs in separate process
QA_RETRY_FAILED_SPECS: "true"
GITLAB_HELM_CHART_REF: "343ef285e2e06c1d66a7077ba3c251bacad56868" # helm chart ref used by test-on-cng pipeline

View File

@ -57,6 +57,7 @@ workflow:
--timeout 5m \
--admin-password "${GITLAB_ADMIN_PASSWORD}" \
--admin-token "${GITLAB_QA_ADMIN_ACCESS_TOKEN}" \
--chart-sha "${GITLAB_HELM_CHART_REF}" \
--ci \
${EXTRA_DEPLOY_VALUES}
script:

View File

@ -1 +1 @@
e91769b14ae7f8a62582a9763ee4e4b284f7fbdc
55197564f4f57d8049d351f5c4614bc9449c2a72

View File

@ -221,7 +221,7 @@
{"name":"gitlab-glfm-markdown","version":"0.0.17","platform":"x86_64-linux","checksum":"cc877ff8ceb3aa8a331fdb8991592e35897823e0f77ba9e4b2b65082c665089b"},
{"name":"gitlab-labkit","version":"0.36.0","platform":"ruby","checksum":"35f21d1c3870ed0c9b8321e25d0b0b0b5021805a5d0525d1eb0fde6b103af981"},
{"name":"gitlab-license","version":"2.4.0","platform":"ruby","checksum":"fd238fb1e605a6b9250d4eb1744434ffd131f18d50a3be32f613c883f7635e20"},
{"name":"gitlab-mail_room","version":"0.0.24","platform":"ruby","checksum":"c7bf3df73dbcc024bc98dbf72514520ac2ff2b6d0124de496279fe56c13c3cb3"},
{"name":"gitlab-mail_room","version":"0.0.25","platform":"ruby","checksum":"223ce7c3c0797b6015eaa37147884e6ddc7be9a7ee90a424358c96bc18613b1a"},
{"name":"gitlab-markup","version":"1.9.0","platform":"ruby","checksum":"7eda045a08ec2d110084252fa13a8c9eac8bdac0e302035ca7db4b82bcbd7ed4"},
{"name":"gitlab-net-dns","version":"0.9.2","platform":"ruby","checksum":"f726d978479d43810819f12a45c0906d775a07e34df111bbe693fffbbef3059d"},
{"name":"gitlab-sdk","version":"0.3.0","platform":"ruby","checksum":"22260f148451155c2e7bdfa1ea9f3e50061a7c31700cb80f8859713560b88903"},

View File

@ -713,11 +713,11 @@ GEM
pg_query (>= 4.2.3, < 6.0)
redis (> 3.0.0, < 6.0.0)
gitlab-license (2.4.0)
gitlab-mail_room (0.0.24)
gitlab-mail_room (0.0.25)
jwt (>= 2.0)
net-imap (>= 0.2.1)
oauth2 (>= 1.4.4, < 3)
redis (>= 4, < 6)
redis (>= 5, < 6)
redis-namespace (>= 1.8.2)
gitlab-markup (1.9.0)
gitlab-net-dns (0.9.2)

View File

@ -121,7 +121,7 @@ class ProjectImportState < ApplicationRecord
update_column(:last_error, sanitized_message)
rescue ActiveRecord::ActiveRecordError => e
Gitlab::Import::Logger.error(
::Import::Framework::Logger.error(
message: 'Error setting import status to failed',
error: e.message,
original_error: sanitized_message

View File

@ -10,7 +10,7 @@ module Groups
@current_user = user
@user_role = user_role
@shared = Gitlab::ImportExport::Shared.new(@group)
@logger = Gitlab::Import::Logger.build
@logger = ::Import::Framework::Logger.build
end
def async_execute

View File

@ -100,7 +100,7 @@ module Import
end
def log_error(message)
Gitlab::Import::Logger.error(
::Import::Framework::Logger.error(
message: 'Import failed due to a BitBucket Server error',
error: message
)

View File

@ -97,7 +97,7 @@ module Import
end
def log_error(message)
Gitlab::Import::Logger.error(
::Import::Framework::Logger.error(
message: 'BitBucket Cloud import failed',
error: message
)

View File

@ -100,7 +100,7 @@ module Import
end
def log_error(message)
Gitlab::Import::Logger.error(
::Import::Framework::Logger.error(
message: 'Import failed due to a Fogbugz error',
error: message
)

View File

@ -64,6 +64,6 @@ class ImportExportCleanUpService
end
def logger
@logger ||= Gitlab::Import::Logger.build
@logger ||= ::Import::Framework::Logger.build
end
end

View File

@ -60,7 +60,7 @@ module Projects
end
def log_lfs_link_results(lfs_objects_linked_count, iterations)
Gitlab::Import::Logger.info(
::Import::Framework::Logger.info(
class: self.class.name,
project_id: project.id,
project_path: project.full_path,

View File

@ -99,7 +99,7 @@ module Gitlab
project = Project.find_by_id(project_id)
strategy = project.import_data&.data&.dig("timeout_strategy") || ProjectImportData::PESSIMISTIC_TIMEOUT
Gitlab::Import::Logger.info(
::Import::Framework::Logger.info(
message: 'Timeout reached, no longer retrying',
project_id: project_id,
jobs_remaining: new_job_count,

View File

@ -55,7 +55,7 @@ module Gitlab
completed_import_states = enqueued_import_states_with_jid.id_in(completed_import_state_ids)
completed_import_state_jids = completed_import_states.map { |import_state| import_state.jid }.join(', ')
Gitlab::Import::Logger.info(
::Import::Framework::Logger.info(
message: 'Marked stuck import jobs as failed',
job_ids: completed_import_state_jids
)

View File

@ -32,7 +32,7 @@ module Gitlab
return false unless project
return true if start(project.latest_jira_import)
Gitlab::Import::Logger.info(
::Import::Framework::Logger.info(
{
project_id: project.id,
project_path: project.full_path,

View File

@ -27,7 +27,7 @@ module Projects
# causing GC to run every time.
service.increment!
rescue Repositories::HousekeepingService::LeaseTaken => e
Gitlab::Import::Logger.info(
::Import::Framework::Logger.info(
message: 'Project housekeeping failed',
project_full_path: @project.full_path,
project_id: @project.id,

View File

@ -43,7 +43,7 @@ class RepositoryImportWorker # rubocop:disable Scalability/IdempotentWorker
def start_import?
return true if start(project.import_state)
Gitlab::Import::Logger.info(
::Import::Framework::Logger.info(
message: 'Project was in inconsistent state while importing',
project_full_path: project.full_path,
project_import_status: project.import_status

View File

@ -9,4 +9,6 @@ description: Information about package dependencies for a set of supported packa
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20549
milestone: '12.6'
gitlab_schema: gitlab_main_cell
exempt_from_sharding: true # this table lacks customer specific data, but it is still unique in every cell
allow_cross_foreign_keys:
- gitlab_main_clusterwide
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/465276

View File

@ -0,0 +1,32 @@
---
owning-stage: "~devops::verify"
description: 'GitLab Secrets Manager ADR 004: Use OpenBao as the secrets management service'
---
# GitLab Secrets Manager ADR 004: Use OpenBao as the secrets management service
## Context
To store and maintain secrets securely in the GitLab Secrets Manager, we want to rely on a robust
system that can provide the features we need.
## Decision
Use [OpenBao](https://openbao.org/docs/what-is-openbao/), a fork of HashiCorp Vault, as the secrets management service.
This component will provide all the mechanisms needed to securely store and manage secrets.
In terms of user-initiated modifications of secrets, GitLab Rails will act as an abstraction
layer and will delegate all tasks to this component.
Using OpenBao provides a few advantages:
1. Avoid implementing our own secure storage of secrets.
1. Support for Hardware Security Modules (HSM).
1. Leverage the existing integration mechanisms we have for HashiCorp Vault, because OpenBao maintains backwards compatibility with the open source edition of Vault.
## Consequences
To provide uninterrupted access to secrets, we need the OpenBao vault to always be unsealed.
We have to ensure that the proper policies and access rights are in place to prevent actors from obtaining secrets in the event that they gain access to the container running GitLab Rails.
Also, because encryption, decryption, and storage of secrets all happen in the OpenBao server, we have to harden its security to prevent a breach of the vault instance.

View File

@ -0,0 +1,22 @@
---
owning-stage: "~devops::verify"
description: 'GitLab Secrets Manager ADR 005: Non-hierarchical key structure for secrets in OpenBao'
---
# GitLab Secrets Manager ADR 005: Non-hierarchical key structure for secrets in OpenBao
## Context
In GitLab, we have a hierarchical structure for projects and their parent namespaces wherein names can be identical in certain parts of the paths. We want to ensure that there are no conflicts with secret paths across the hierarchy and across all customers when we store them in OpenBao.
## Decision
While secrets are defined in a hierarchical fashion in the GitLab UI, the secret key paths are structured in a flat manner.
Consider the following example path of a project with nested namespaces:
- `gitlab-org/ci-cd/verify/test-project`
- The secrets for the top-level group `gitlab-org` are stored under `kv-v2/data/namespaces/ci/<ID of gitlab-org>`
- The secrets for the subgroup `verify` are stored under `kv-v2/data/namespaces/ci/<ID of verify>`
- The secrets for the project `test-project` are stored under `kv-v2/data/projects/ci/<ID of test-project>`
- Note the use of `ci/` prefix so that we can group different types of secrets.
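The following is a minimal Ruby sketch, not production code, of how these flat, ID-based paths could be derived. The helper names and example IDs are illustrative assumptions.

```ruby
# Minimal sketch of the flat key structure described above.
# Helper names and example IDs are illustrative, not from the codebase.
def group_secrets_path(group_id)
  # The `ci/` prefix groups CI-type secrets.
  "kv-v2/data/namespaces/ci/#{group_id}"
end

def project_secrets_path(project_id)
  "kv-v2/data/projects/ci/#{project_id}"
end

# Only numeric IDs appear in the path, so identical group or project names
# anywhere in the hierarchy can never produce colliding secret paths.
group_secrets_path(9970) # => "kv-v2/data/namespaces/ci/9970"
project_secrets_path(42) # => "kv-v2/data/projects/ci/42"
```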

View File

@ -1,8 +1,8 @@
---
status: proposed
creation-date: "2023-08-07"
authors: [ "@alberts-gitlab" ]
coach: [ "@grzesiek" ]
authors: [ "@alberts-gitlab", "@iamricecake" ]
coach: [ "@grzesiek", "@fabiopitino" ]
approvers: [ "@jocelynjane", "@shampton" ]
owning-stage: "~devops::verify"
participating-stages: []
@ -35,7 +35,7 @@ In addition, customer's engineering teams using these external secret managers
may need to maintain these systems themselves, adding to the operational burden.
Having a GitLab native secret manager would provide customers a secure method to store and access secrets
without the overhead of third party tools as well as to leverage the tight integration with other GitLab features.
without the overhead of third party tools and to leverage the tight integration with other GitLab features.
### Goals
@ -45,7 +45,19 @@ Provide GitLab users with a way to:
- Use the stored secrets in GitLab components (for example, CI Runner)
- Use the stored secrets in external environments (for example, production infrastructure).
- Manage access to secrets across a root namespace, subgroups and projects.
- Seal/unseal secrets vault on demand.
#### Use Cases
To help design the architecture, we need to understand how users, in their roles, would
operate and use the system. Here are significant use case scenarios that can help drive our
design decisions:
- As a user running a pipeline, I want a CI job to automatically fetch secrets specified in the `.gitlab-ci.yml` file.
- As a DevOps engineer, I want the deployment process to fetch secrets necessary for deployment directly from GitLab Secrets Manager.
- As a DevOps engineer, I want to manually retrieve the staging database password from the GitLab Secrets Manager.
- As a project maintainer, I want to destroy all secrets in the scope of the project, when the project is being deleted.
- As a GitLab instance admin, I want to quickly rotate all runner registration tokens.
- As a FIPS compliant customer, I want GitLab Secrets Manager to encrypt/decrypt secrets using an HSM solution.
#### Non-functional requirements
@ -61,72 +73,63 @@ This blueprint does not cover the following:
## Decisions
- [ADR-004: Use OpenBao as the secrets management service](decisions/004_openbao.md)
- [ADR-005: Non-hierarchical key structure for secrets in OpenBao](decisions/005_secrets_key_structure.md)
### Superseded
These documents are part of the initial iteration of this blueprint.
- [ADR-001: Use envelope encryption](decisions/001_envelop_encryption.md)
- [ADR-002: Use GCP Key Management Service](decisions/002_gcp_kms.md)
- [ADR-003: Build Secrets Manager in Go](decisions/003_go_service.md)
## Proposal
The secrets manager feature will consist of three core components:
The secrets manager feature will be available on both SaaS and Self-Managed installations
and will consist of two core components:
1. GitLab Rails
1. GitLab Secrets Manager Service
1. GCP Key Management
1. OpenBao Server
At a high level, secrets will be stored using unique encryption keys in order to achieve isolation
across GitLab. Each service should also be isolated such that in the event
one of the components is compromised, the likelihood of secrets leaking is minimized.
```mermaid
flowchart LR
c([Consumer]) --interacts with-->glab[GitLab Rails]--with backend-->o[OpenBao]
```
![Secrets Manager Overview](secrets-manager-overview.png)
A consumer can be:
1. A user who interacts manually with a client library, API, or UI.
1. An integration, for example, Vault integration on Runner.
**1. GitLab Rails**
GitLab Rails would be the main interface that users would interact with when creating secrets using the Secrets Manager feature.
GitLab Rails would be the main interface that users would interact with when managing secrets using the Secrets Manager feature.
This component performs the following role:
This component is a facade to the OpenBao server.
1. Storing unique encryption public keys per organization.
1. Encrypting and storing secret using envelope encryption.
**2. OpenBao Server**
The plain-text secret would be encrypted using a single use data key.
The data key is then encrypted using the public key belonging to the group or project.
Both, the encrypted secret and the encrypted data key, are being stored in the database.
OpenBao Server will be a new component in the overall GitLab architecture. This component provides all the secrets management capabilities,
including storing the secrets themselves.
**2. GitLab Secrets Manager Service**
### Use Case Studies
GitLab Secrets Manager Service will be a new component in the GitLab overall architecture. This component serves the following purpose:
1. Correlating GitLab identities into GCP identities for access control.
1. A proxy over GCP Key Management for decrypting operations.
[The service will use Go-based tech stack](decisions/003_go_service.md) and [labkit](https://gitlab.com/gitlab-org/labkit).
**3. GCP Key Management**
We choose to leverage GCP Key Management to build on the security and trust that GCP provides on cryptographic operations.
In particular, we would be using GCP Key Management to store the private keys that will be used to decrypt
the data keys mentioned above.
### Implementation detail
- [Secrets Manager](secrets_manager.md)
- [Using secrets in a CI job](studies/ci_job_secrets.md)
### Further investigations required
1. Management of identities stored in GCP Key Management.
We need to investigate how we can correlate and de-multiplex GitLab identities into
GCP identities that are used to allow access to cryptographic operations on GCP Key Management.
1. Authentication of clients. Clients to the Secrets Manager could be GitLab Runner or external clients.
For each of these, we need a secure and reliable method to authenticate requests to decrypt a secret.
1. Assignment of GCP backed private keys to each identity.
### Availability on SaaS and Self-Managed
To begin with, the proposal above is intended for GitLab SaaS environment. GitLab SaaS is deployed on Google Cloud Platform.
Hence, GCP Key Management is the natural choice for a cloud-based key management service.
To extend this service to self-managed GitLab instances, we would consider using GitLab Cloud Connector as a proxy between
self-managed GitLab instances and the GitLab Secrets Manager.
1. Authentication of clients other than GitLab Runner.
GitLab Runner authenticates using JWT; for other types of clients, we need a secure and reliable method to authenticate requests to decrypt a secret.
1. How to namespace data, roles, and policies to a specific tenant.
1. How to allow organizations to seal/unseal secrets vault on demand.
1. Infrastructure setup, including how OpenBao will be installed for self-managed instances.
1. How to best implement sharing of secrets between multiple groups in GitLab.
1. Establish our protocol and processes for incidents that may require sealing the secrets vault.
1. How to support protected and environment specific rules for secrets.
1. How to audit secret changes. Do we want to use [audit socket](https://openbao.org/docs/audit/socket/)?
1. Do we want to structure project secret paths to be under namespaces to increase isolation between tenants?
1. Should the secrets be revoked if a project or subgroup is moved under a different top-level group/organization?
## Alternative Solutions

Binary file not shown (image deleted; size before: 117 KiB)

View File

@ -1,14 +0,0 @@
---
status: proposed
creation-date: "2023-08-07"
authors: [ "@alberts-gitlab" ]
coach: [ "@grzesiek" ]
approvers: [ "@jocelynjane", "@shampton" ]
owning-stage: "~devops::verify"
participating-stages: []
---
<!-- Blueprints often contain forward-looking statements -->
<!-- vale gitlab.FutureTense = NO -->
# GitLab Secrets Manager - Implementation Detail (Placeholder)

View File

@ -0,0 +1,232 @@
---
owning-stage: "~devops::verify"
description: 'Use case study: using secrets in a CI job'
---
# Use case study: using secrets in a CI job
## Objectives
- To map out how users can use their native GitLab secrets in their CI jobs.
- Given OpenBao is a fork of HashiCorp Vault, we want to confirm its compatibility with our [Vault integration in Runner](../../../../ci/secrets/index.md).
- At a high level, gain a better understanding of how to structure OpenBao [policies](https://openbao.org/docs/concepts/policies/) and [JWT roles](https://openbao.org/docs/auth/jwt/#configuration) to be compatible with a project's varied permissions per GitLab user role.
## Prerequisites
The workflow requires that the [templated policies](https://openbao.org/docs/concepts/policies/#templated-policies) for each combination of [capabilities](https://openbao.org/docs/concepts/policies/#capabilities) (e.g. `read+update`, `read+update+create`) are predefined. For example, consider the following templated policy that allows full access to a project's secrets:
```shell
bao policy write project_full_access - <<EOF
path "kv-v2/data/projects/{{identity.entity.aliases.auth_jwt_02163755.metadata.project_id}}/*" {
capabilities = [ "read", "create", "update", "delete", "list" ]
}
EOF
```
The policies are associated with JWT roles on authorization. The `project_full_access` policy is particularly important for the initial project owner role:
```shell
bao write auth/jwt/role/project_owner - <<EOF
{
"role_type": "jwt",
"policies": ["project_full_access"],
"token_explicit_max_ttl": 60,
"user_claim": "user_id",
"claim_mappings": {
"project_id": "project_id"
},
"bound_audiences": "secrets.gitlab.com",
"bound_claims_type": "glob",
"bound_claims": {
"user_access_level": "owner"
}
}
EOF
```
Given OpenBao policies are deny by default, this initial JWT role is necessary to grant project owners full access to read and write secrets.
## Initial setup workflow
This section details the steps and technical information for when the project's native secrets are set up for the first time.
1. Project owner enables GitLab Secrets Manager through the GitLab UI.
1. Project owner defines additional permissions on which GitLab user roles can read, write, or create secrets through the GitLab UI.
- By default, project owners have full access and other roles are denied.
- For example, if the owner allows read-only access for the `developer` role, then, through the OpenBao API, the Rails backend defines `project_88_developer`:
```shell
# The format of the role name is `project_<project-id>_<user-role>`
bao write auth/jwt/role/project_88_developer - <<EOF
{
"role_type": "jwt",
"policies": ["project_read_only"],
"token_explicit_max_ttl": 60,
"user_claim": "user_id",
"claim_mappings": {
"project_id": "project_id"
},
"bound_audiences": "secrets.gitlab.com",
"bound_claims_type": "glob",
"bound_claims": {
"user_access_level": "developer"
}
}
EOF
```
- Unlike the `project_owner` generic role, we have to define other non-owner roles tied to the project because projects may have different combinations of permissions per user role.
1. Project owner defines secrets through the GitLab UI.
- User defines details such as name, key, and value. Sample input:
- name: `Production Database Password`
- key: `DB_PASS`
- value: `mydbpass`
- The secret is stored in OpenBao under `kv-v2/data/projects/88/ci/DB_PASS`, with the JSON data:
```json
{
"data": "mydbpass"
}
```
- The user doesn't need to enter the secret value in JSON format. The Rails backend transforms the input into a JSON object with the `data` key before sending it to OpenBao (see the sketch after this list).
1. Developer uses the `secrets` keyword in the `.gitlab-ci.yml`.
- Sample configuration:
```yaml
job-with-secrets:
secrets:
MY_SECRET_ON_OPENBAO:
key: DB_PASS # Translates to kv-v2/data/projects/88/DB_PASS, field `data`
```
- There is no need to specify `id_tokens:VAULT_ID_TOKEN` because `aud` defaults to `https://secrets.gitlab.com`, where the OpenBao service is.
- Unlike with HashiCorp Vault, there is no need to define CI/CD variables.
- The `VAULT_SERVER_URL` defaults to `https://secrets.gitlab.com`, where the OpenBao service is.
- The `VAULT_AUTH_ROLE` defaults to `project_<project_id>_<job_user_role>` to match the JWT role in OpenBao.
1. The CI job runs and `MY_SECRET_ON_OPENBAO` is available as an environment variable.
- OpenBao verifies the integrity of the ID token and validates that the `bound_claims` match the custom claims, especially the `user_access_level`, which contains the GitLab user role of the user.
- Similar to HashiCorp Vault secrets, this is a [`file` variable](../../../../ci/variables/index.md#use-file-type-cicd-variables).
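As a point of reference for the workflow step above where the Rails backend wraps the raw value in a `data` key, here is a hypothetical Ruby sketch of that transformation. Only the path format and JSON payload come from this study; the HTTP endpoint shape and token header are assumptions.

```ruby
# Hypothetical sketch, not the GitLab implementation: build the flat path and
# JSON payload described above and send them to OpenBao. The endpoint shape
# and the authentication header are assumptions.
require 'json'
require 'net/http'

OPENBAO_URL = ENV.fetch('VAULT_SERVER_URL', 'https://secrets.gitlab.com')

def store_project_secret(project_id:, key:, value:)
  path = "kv-v2/data/projects/#{project_id}/ci/#{key}" # e.g. kv-v2/data/projects/88/ci/DB_PASS
  payload = { data: value }.to_json                    # the user enters only the raw value

  uri = URI("#{OPENBAO_URL}/v1/#{path}")
  request = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json')
  request['X-Vault-Token'] = ENV['OPENBAO_TOKEN'] # assumed auth mechanism
  request.body = payload

  Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
    http.request(request)
  end
end

store_project_secret(project_id: 88, key: 'DB_PASS', value: 'mydbpass')
```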
## Technical implementation findings
High-level technical implementation details pertaining to OpenBao and Rails to support the workflow.
1. The OpenBao service needs to be properly configured to make it compatible with the workflow.
- Configure [JWT authentication](https://openbao.org/docs/auth/jwt/#jwt-authentication) to make it work with [ID tokens authentication](../../../../ci/secrets/id_token_authentication.md#automatic-id-token-authentication-with-hashicorp-vault).
- The documentation shows [instructions](../../../../ci/secrets/index.md#configure-your-vault-server) using the `vault` CLI, but it should work similarly for `bao`.
- The OpenBao API is reachable through `https://secrets.gitlab.com`.
- To reference the `project_id` in the templated policy, we needed to get the value of the JWT auth mount accessor (`auth_jwt_02163755` in the output of `bao auth list`). This has to be automated during deployments so that the templated policies remain up to date with the correct accessor. The mount accessor value is persisted in storage and keeps its value even when the OpenBao server is restarted and sealed.
1. The Rails backend needs the accompanying implementations to support the workflow.
- ActiveRecord model for the secrets (a sketch follows this list). Listing secrets and viewing details in the UI shouldn't make a request to OpenBao.
- ActiveRecord model for the permissions. Listing permissions in the UI shouldn't make a request to OpenBao.
- Update the ID token-related implementation to support the use of ID tokens without the need to define `id_tokens` in the CI configuration.
- Proper mapping of defaults for `VAULT_SERVER_URL` and `VAULT_AUTH_ROLE`.
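The following is a hedged sketch of the Rails-side records mentioned in the findings above. Class names, columns, and validations are assumptions rather than the shipped schema.

```ruby
# Hypothetical models; names and columns are assumptions. Only metadata is
# stored in Rails so that listing secrets or permissions in the UI never
# calls OpenBao; the secret values themselves live only in OpenBao.
class ProjectSecret < ApplicationRecord
  belongs_to :project

  # For example: name: 'Production Database Password', key: 'DB_PASS'
  validates :name, :key, presence: true
end

class ProjectSecretPermission < ApplicationRecord
  belongs_to :project

  # For example: user_access_level: 'developer', capabilities: ['read']
  validates :user_access_level, presence: true
end
```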
## How to test locally
The policy and role structure presented here was first tested locally on a GDK setup with an OpenBao server running in [`dev` mode](https://openbao.org/docs/get-started/developer-qs/).
Here's a step-by-step guide on how to test this locally:
1. Make sure [GDK is properly set up with runner](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/main/doc/howto/runner.md).
- This was tested on a [GDK with Docker executor](https://gitlab.com/gitlab-org/gitlab-development-kit/blob/main/doc/howto/runner.md#set-up-a-local-network) with `gdk.test` pointed to `172.16.123.1`, but it should also work with a shell executor.
- Confirm that you can successfully run a CI pipeline on a test project.
1. Create the test project for fetching the secrets from OpenBao later.
- Track its project ID. In this example, the project ID was `53`.
1. Start up OpenBao in [`dev` mode](https://openbao.org/docs/concepts/dev-server/).
```shell
bao server -dev -dev-root-token-id="dev-only-token"
```
- This makes OpenBao reachable at `http://127.0.0.1:8200`.
- You might need to run `export BAO_ADDR='http://127.0.0.1:8200'` for the `bao` CLI commands below to work.
1. Enable kv-v2 secrets engine.
```shell
bao secrets enable kv-v2 # By default mounts to `kv-v2/data`
```
1. Configure OpenBao JWT authentication.
```shell
bao write auth/jwt/config \
oidc_discovery_url="http://gdk.test:3000" \
bound_issuer="http://gdk.test:3000"
```
1. To test the policy and role generated for a project owner with the GitLab user role `owner`, create the [templated policy](https://openbao.org/docs/concepts/policies/#templated-policies) and the JWT role for the specific `owner` role. The JWT role was based on the [GitLab Vault sample server role](../../../../ci/secrets/index.md#configure-vault-server-roles).
- Take note of the value of the JWT auth mount accessor when you run `bao auth list`:
```shell
Path Type Accessor Description Version
---- ---- -------- ----------- -------
jwt/ jwt auth_jwt_02163755 n/a n/a
token/ token auth_token_90d6d0c1 token based credentials n/a
```
- Define the templated policy and reference the `project_id` through the metadata of the mounted JWT auth plugin:
```shell
bao policy write project_full_access - <<EOF
# owners have full read-write access to their project's secrets
# copy over the `auth_jwt_02163755` mount accessor value
path "kv-v2/data/projects/{{identity.entity.aliases.auth_jwt_02163755.metadata.project_id}}/*" {
capabilities = [ "read", "create", "update", "delete", "list" ]
}
EOF
```
- Define the JWT role and associate the `project_full_access` policy:
```shell
bao write auth/jwt/role/project_owner - <<EOF
{
"role_type": "jwt",
"policies": ["project_full_access"],
"token_explicit_max_ttl": 60,
"user_claim": "user_id",
"claim_mappings": {
"project_id": "project_id"
},
"bound_audiences": "secrets.gitlab.com",
"bound_claims_type": "glob",
"bound_claims": {
"user_access_level": "owner"
}
}
EOF
```
1. Create a sample secret that we want to fetch in the CI job.
```shell
bao kv put -mount=kv-v2 projects/53/foo val=my-long-passcode
```
1. On the test project, configure the `.gitlab-ci.yml` to fetch secrets from OpenBao using the existing [Vault integration](../../../../ci/secrets/index.md#use-vault-secrets-in-a-ci-job).
```yaml
test_openbao:
variables:
VAULT_SERVER_URL: http://127.0.0.1:8200
VAULT_AUTH_ROLE: project_owner
id_tokens:
VAULT_ID_TOKEN:
aud: secrets.gitlab.com
secrets:
SECRET:
vault: projects/53/foo/val # translates to secret `kv-v2/data/projects/53/foo`, field `val`
token: $VAULT_ID_TOKEN
script:
- echo "testing..."
- cat $SECRET
- echo "done."
```
- `VAULT_AUTH_ROLE` matches the JWT role we created earlier.
- `aud` matches the role's `bound_audiences`.
- The ID token generated in this job is matched by OpenBao using the `bound_claims`, specifically the `user_access_level` which is included in the [custom claims](../../../../ci/secrets/id_token_authentication.md#token-payload) of the ID token.
1. Run a pipeline and confirm that in the job trace there's a masked output of the secret that it fetched from OpenBao.

View File

@ -0,0 +1,460 @@
---
stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
description: Data Seeder test data harness created by the Test Data Working Group https://handbook.gitlab.com/handbook/company/working-groups/demo-test-data/
---
# Data Seeder
The Data Seeder is a test data seeding harness that can seed test data into a user or group namespace.
The Data Seeder uses FactoryBot in the backend, which makes maintenance straightforward and future-proof. When a model changes,
FactoryBot already reflects the change.
## Docker Setup
### With GDK
1. Start a containerized GitLab instance using local files
```shell
docker run \
-d \
-p 8080:80 \
--name gitlab \
-v ./scripts/data_seeder:/opt/gitlab/embedded/service/gitlab-rails/scripts/data_seeder \
-v ./ee/db/seeds/data_seeder:/opt/gitlab/embedded/service/gitlab-rails/ee/db/seeds/data_seeder \
-v ./ee/lib/tasks/gitlab/seed:/opt/gitlab/embedded/service/gitlab-rails/ee/lib/tasks/gitlab/seed \
-v ./spec:/opt/gitlab/embedded/service/gitlab-rails/spec \
-v ./ee/spec:/opt/gitlab/embedded/service/gitlab-rails/ee/spec \
gitlab/gitlab-ee:16.9.8-ee.0
```
1. Globalize test gems
```shell
docker exec gitlab bash -c "cd /opt/gitlab/embedded/service/gitlab-rails; ruby scripts/data_seeder/globalize_gems.rb; bundle install"
```
1. Seed the data
```shell
docker exec -it gitlab gitlab-rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
```
### Without GDK
1. Start a containerized GitLab instance
```shell
docker run \
-p 8080:80 \
--name gitlab \
-d \
gitlab/gitlab-ee:16.9.8-ee.0
```
1. Import the test resources
```shell
docker exec gitlab bash -c "wget -O - https://gitlab.com/gitlab-org/gitlab/-/raw/master/scripts/data_seeder/test_resources.sh | bash"
```
```shell
# OR check out a specific branch, commit, or tag
docker exec gitlab bash -c "wget -O - https://gitlab.com/gitlab-org/gitlab/-/raw/master/scripts/data_seeder/test_resources.sh | REF=v16.7.0-ee bash"
```
### Get the root password
To fetch the password for the GitLab instance that was created, execute the following command and use the password given by the output:
```shell
docker exec gitlab cat /etc/gitlab/initial_root_password
```
_If you receive `cat: /etc/gitlab/initial_root_password: No such file or directory`, wait a bit for GitLab to boot and try again._
You can then sign in to `http://localhost:8080/users/sign_in` using the credentials: `root / <Password taken from initial_root_password>`
### Seed the data
**IMPORTANT**: This step should not be executed until the container has started completely and you are able to see the login page at `http://localhost:8080`.
```shell
docker exec -it gitlab gitlab-rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
```
## GDK Setup
```shell
$ gdk start db
ok: run: services/postgresql: (pid n) 0s, normally down
ok: run: services/redis: (pid n) 74s, normally down
$ bundle install
Bundle complete!
$ bundle exec rake db:migrate
main: migrated
ci: migrated
```
### Run
The [`ee:gitlab:seed:data_seeder` Rake task](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/seed/data_seeder.rake) takes one argument, `:file`.
```shell
$ bundle exec rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
Seeding data for Administrator
....
```
#### `:file`
`:file` is the seed file path: either a relative `.rb`, `.yml`, or `.json` file located in `ee/db/seeds/data_seeder`, or an absolute path to a seed file.
## Linux package Setup
WARNING:
While it is possible to use the Data Seeder with a Linux package installation, **use caution** if you do this when the instance is being used in a production setting.
1. Change the working directory to the GitLab installation:
```shell
cd /opt/gitlab/embedded/service/gitlab-rails
```
1. Install test resources:
```shell
. scripts/data_seeder/test_resources.sh
```
1. Globalize gems:
```shell
/opt/gitlab/embedded/bin/chpst -e /opt/gitlab/etc/gitlab-rails/env /opt/gitlab/embedded/bin/bundle exec ruby scripts/data_seeder/globalize_gems.rb
```
1. Install bundle:
```shell
/opt/gitlab/embedded/bin/chpst -e /opt/gitlab/etc/gitlab-rails/env /opt/gitlab/embedded/bin/bundle
```
1. Seed the data:
```shell
gitlab-rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
```
## Develop
The Data Seeder uses FactoryBot definitions from `spec/factories`, which:
1. Save time on development.
1. Are easy to read.
1. Are easy to maintain.
1. Do not rely on an API that may change in the future.
1. Are always up to date.
1. Execute at the lowest level possible ([ORM](https://guides.rubyonrails.org/active_record_basics.html#active-record-as-an-orm-framework)) to create data as quickly as possible.
> From the [FactoryBot README](https://github.com/thoughtbot/factory_bot#readme_) : `factory_bot` is a fixtures replacement with a straightforward definition syntax, support for multiple build
> strategies (saved instances, unsaved instances, attribute hashes, and stubbed objects), and support for multiple factories for the same class, including factory
> inheritance
Factories reside in `spec/factories/*` and are fixtures for Rails models found in `app/models/*`. For example, for a model named `app/models/issue.rb`, the factory
is named `spec/factories/issues.rb`. For a model named `app/models/project.rb`, the factory is named `spec/factories/projects.rb`.
The GitLab Data Seeder currently supports three parsers: Ruby, YAML, and JSON.
### Ruby
All Ruby Seeds must define a `DataSeeder` class with a `#seed` instance method. You may structure your Ruby class as you wish. All FactoryBot [methods](https://www.rubydoc.info/gems/factory_bot/FactoryBot/Syntax/Methods) (`create`, `build`, `create_list`) are included in the class automatically and may be called.
The `DataSeeder` class contains the following instance variables defined upon seeding:
- `@seed_file` - The `File` object.
- `@owner` - The owner of the seed data.
- `@name` - The name of the seed. This is the seed file name without the extension.
- `@group` - The root group that all seeded data is created under.
- `@logger` - The logger object to log output. Logging output may be found in `log/data_seeder.log`.
```ruby
# frozen_string_literal: true
class DataSeeder
def seed
my_group = create(:group, name: 'My Group', path: 'my-group-path', parent: @group)
@logger.info "Created #{my_group.name}" #=> Created My Group
my_project = create(:project, :public, name: 'My Project', namespace: my_group, creator: @owner)
end
end
```
### YAML
The YAML Parser is a DSL that supports Factory definitions and allows you to seed data using a human-readable format.
```yaml
name: My Seeder
groups:
- _id: my_group
name: My Group
path: my-group-path
projects:
- _id: my_project
name: My Project
namespace_id: <%= groups.my_group.id %>
creator_id: <%= @owner.id %>
traits:
- public
```
### JSON
The JSON Parser allows you to house seed files in JSON format.
```json
{
"name": "My Seeder",
"groups": [
{ "_id": "my_group", "name": "My Group", "path": "my-group-path" }
],
"projects": [
{
"_id": "my_project",
"name": "My Project",
"namespace_id": "<%= groups.my_group.id %>",
"creator_id": "<%= @owner.id %>",
"traits": ["public"]
}
]
}
```
### Logging
When running the Data Seeder, the default logging level is `info`.
You can override the logging level by specifying `GITLAB_LOG_LEVEL=<level>`.
```shell
$ GITLAB_LOG_LEVEL=debug bundle exec rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
Seeding data for Administrator
......
$ GITLAB_LOG_LEVEL=warn bundle exec rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
Seeding data for Administrator
......
$ GITLAB_LOG_LEVEL=error bundle exec rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
......
```
### Taxonomy of a Factory
Factories consist of three main parts: the **Name** of the factory, the **Traits**, and the **Attributes**.
Given: `create(:iteration, :with_title, :current, title: 'My Iteration')`
|||
|:-|:-|
| **:iteration** | This is the **Name** of the factory. The filename will be the plural form of this **Name** and reside under either `spec/factories/iterations.rb` or `ee/spec/factories/iterations.rb`. |
| **:with_title** | This is a **Trait** of the factory. [See how it's defined](https://gitlab.com/gitlab-org/gitlab/-/blob/9c2a1f98483921dd006d70fdaed316e21fc5652f/ee/spec/factories/iterations.rb#L21-23). |
| **:current** | This is a **Trait** of the factory. [See how it's defined](https://gitlab.com/gitlab-org/gitlab/-/blob/9c2a1f98483921dd006d70fdaed316e21fc5652f/ee/spec/factories/iterations.rb#L29-31). |
| **title: 'My Iteration'** | This is an **Attribute** of the factory that is passed to the Model for creation. |
### Examples
In these examples, you will see an instance variable `@owner`. This is the `root` user (`User.first`).
#### Create a Group
```ruby
my_group = create(:group, name: 'My Group', path: 'my-group-path')
```
#### Create a Project
```ruby
# create a Project belonging to a Group
my_project = create(:project, :public, name: 'My Project', namespace: my_group, creator: @owner)
```
#### Create an Issue
```ruby
# create an Issue belonging to a Project
my_issue = create(:issue, title: 'My Issue', project: my_project, weight: 2)
```
#### Create an Iteration
```ruby
# create an Iteration under a Group
my_iteration = create(:iteration, :with_title, :current, title: 'My Iteration', group: my_group)
```
### Frequently encountered issues
#### Username or email has already been taken
If you see either of these errors:
- `ActiveRecord::RecordInvalid: Validation failed: Email has already been taken`
- `ActiveRecord::RecordInvalid: Validation failed: Username has already been taken`
This is because, by default, our factories are written to backfill any data that is missing. For instance, when a project
is created, the project must have somebody who created it. If the owner is not specified, the factory attempts to create one.
**How to fix**
Check the respective factory to find out which key is required, usually `:author` or `:owner`.
```ruby
# This throws ActiveRecord::RecordInvalid
create(:project, name: 'Throws Error', namespace: create(:group, name: 'Some Group'))
# Specify the user where @owner is a [User] record
create(:project, name: 'No longer throws error', owner: @owner, namespace: create(:group, name: 'Some Group'))
create(:epic, group: create(:group), author: @owner)
```
#### `parsing id "my id" as "my_id"`
See [specifying variables](#specify-a-variable)
#### `id is invalid`
Given that non-Ruby parsers parse IDs as Ruby Objects, the [naming conventions](https://docs.ruby-lang.org/en/2.0.0/syntax/methods_rdoc.html#label-Method+Names) of Ruby must be followed when specifying an ID.
Examples of invalid IDs:
- IDs that start with a number
- IDs that have special characters (`-`, `!`, `$`, `@`, `` ` ``, `=`, `<`, `>`, `;`, `:`)
#### ActiveRecord::AssociationTypeMismatch: Model expected, got ... which is an instance of String
This is a limitation of the seeder.
See the issue for [allowing parsing of raw Ruby objects](https://gitlab.com/gitlab-org/gitlab/-/issues/403079).
## YAML Factories
### Generator to generate _n_ records
### [Group Labels](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/factories/labels.rb)
```yaml
group_labels:
# Group Label with Name and a Color
- name: Group Label 1
group_id: <%= @group.id %>
color: "#FF0000"
```
### [Group Milestones](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/factories/milestones.rb)
```yaml
group_milestones:
# Past Milestone
- name: Past Milestone
group_id: <%= @group.id %>
group:
start_date: <%= 1.month.ago %>
due_date: <%= 1.day.ago %>
# Ongoing Milestone
- name: Ongoing Milestone
group_id: <%= @group.id %>
group:
start_date: <%= 1.day.ago %>
due_date: <%= 1.month.from_now %>
# Future Milestone
- name: Ongoing Milestone
group_id: <%= @group.id %>
group:
start_date: <%= 1.month.from_now %>
due_date: <%= 2.months.from_now %>
```
#### Quirks
- You _must_ specify `group:` and have it be empty. This is because the Milestones factory manipulates the factory in an `after(:build)`. If this is not present, the Milestone cannot be associated properly with the Group.
### [Epics](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/spec/factories/epics.rb)
```yaml
epics:
# Simple Epic
- title: Simple Epic
group_id: <%= @group.id %>
author_id: <%= @owner.id %>
# Epic with detailed Markdown description
- title: Detailed Epic
group_id: <%= @group.id %>
author_id: <%= @owner.id %>
description: |
# Markdown
**Description**
# Epic with dates
- title: Epic with dates
group_id: <%= @group.id %>
author_id: <%= @owner.id %>
start_date: <%= 1.day.ago %>
due_date: <%= 1.month.from_now %>
```
## Variables
Each created factory can be assigned an identifier (ID) that you can use later in the seed file.
### Specify a variable
You may pass an `_id` attribute on any factory to refer back to it later in non-Ruby parsers.
Variables are scoped to the factory definitions they reside under.
```yaml
---
group_labels:
- _id: my_label #=> group_labels.my_label
projects:
- _id: my_project #=> projects.my_project
```
Variables:
NOTE:
It is not advised, but you may specify variables with spaces. These variables may be referred back to with underscores.
### Referencing a variable
Given a YAML seed file:
```yaml
---
group_labels:
- _id: my_group_label #=> group_labels.my_group_label
name: My Group Label
color: "#FF0000"
- _id: my_other_group_label #=> group_labels.my_other_group_label
color: <%= group_labels.my_group_label.color %>
projects:
- _id: my_project #=> projects.my_project
name: My Project
```
When referring to a variable, the variable refers to the _already seeded_ models. In other words, the model's `id` attribute will
be populated.

View File

@ -115,7 +115,7 @@ importer progresses. Here's what to do:
attr_accessor :logger
def initialize
@logger = Gitlab::Import::Logger.build
@logger = ::Import::Framework::Logger.build
end
```
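As context for this rename, here is a hedged sketch of how an importer might emit structured log entries through the new module. The importer class and log fields are illustrative; only `::Import::Framework::Logger` and the keyword-style `info`/`error` calls mirror the changes elsewhere in this commit.

```ruby
# Illustrative importer, not from the GitLab codebase.
class MyImporter
  attr_accessor :logger

  def initialize
    @logger = ::Import::Framework::Logger.build
  end

  def import(project)
    @logger.info(message: 'Import started', project_id: project.id)
    # ... perform the import ...
  rescue StandardError => e
    @logger.error(message: 'Import failed', project_id: project.id, error: e.message)
    raise
  end
end
```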

View File

@ -1,460 +1,11 @@
---
stage: Manage
group: Foundations
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
description: Data Seeder test data harness created by the Test Data Working Group https://handbook.gitlab.com/handbook/company/working-groups/demo-test-data/
redirect_to: '../development/data_seeder.md'
remove_date: '2024-06-03'
---
# GitLab Data Seeder
This document was moved to [another location](../development/data_seeder.md).
GitLab Data Seeder (GDS) is a test data seeding harness, that can seed test data into a user or group namespace.
The Data Seeder uses FactoryBot in the backend which makes maintenance straightforward and future-proof. When a Model changes,
FactoryBot already reflects the change.
## Docker Setup
### With GDK
1. Start a containerized GitLab instance using local files
```shell
docker run \
-d \
-p 8080:80 \
--name gitlab \
-v ./scripts/data_seeder:/opt/gitlab/embedded/service/gitlab-rails/scripts/data_seeder \
-v ./ee/db/seeds/data_seeder:/opt/gitlab/embedded/service/gitlab-rails/ee/db/seeds/data_seeder \
-v ./ee/lib/tasks/gitlab/seed:/opt/gitlab/embedded/service/gitlab-rails/ee/lib/tasks/gitlab/seed \
-v ./spec:/opt/gitlab/embedded/service/gitlab-rails/spec \
-v ./ee/spec:/opt/gitlab/embedded/service/gitlab-rails/ee/spec \
gitlab/gitlab-ee:16.9.8-ee.0
```
1. Globalize test gems
```shell
docker exec gitlab bash -c "cd /opt/gitlab/embedded/service/gitlab-rails; ruby scripts/data_seeder/globalize_gems.rb; bundle install"
```
1. Seed the data
```shell
docker exec -it gitlab gitlab-rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
```
### Without GDK
1. Start a containerized GitLab instance
```shell
docker run \
-p 8080:80 \
--name gitlab \
-d \
gitlab/gitlab-ee:16.9.8-ee.0
```
1. Import the test resources
```ruby
docker exec gitlab bash -c "wget -O - https://gitlab.com/gitlab-org/gitlab/-/raw/master/scripts/data_seeder/test_resources.sh | bash"
```
```ruby
# OR check out a specific branch, commit, or tag
docker exec gitlab bash -c "wget -O - https://gitlab.com/gitlab-org/gitlab/-/raw/master/scripts/data_seeder/test_resources.sh | REF=v16.7.0-ee bash"
```
### Get the root password
To fetch the password for the GitLab instance that was created, execute the following command and use the password given by the output:
```shell
docker exec gitlab cat /etc/gitlab/initial_root_password
```
_If you receive `cat: /etc/gitlab/initialize_root_password: No such file or directory`, please wait for a bit for GitLab to boot and try again._
You can then sign in to `http://localhost:8080/users/sign_in` using the credentials: `root / <Password taken from initial_root_password>`
### Seed the data
**IMPORTANT**: This step should not be executed until the container has started completely and you are able to see the login page at `http://localhost:8080`.
```shell
docker exec -it gitlab gitlab-rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
```
## GDK Setup
```shell
$ gdk start db
ok: run: services/postgresql: (pid n) 0s, normally down
ok: run: services/redis: (pid n) 74s, normally down
$ bundle install
Bundle complete!
$ bundle exec rake db:migrate
main: migrated
ci: migrated
```
### Run
The [`ee:gitlab:seed:data_seeder` Rake task](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/seed/data_seeder.rake) takes one argument. `:file`.
```shell
$ bundle exec rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
Seeding data for Administrator
....
```
#### `:file`
Where `:file` is the file path. (This path reflects relative `.rb`, `.yml`, or `.json` files located in `ee/db/seeds/data_seeder`, or absolute paths to seed files.)
## Linux package Setup
WARNING:
While it is possible to use the Data Seeder with an Linux package installation, **use caution** if you do this when the instance is being used in a production setting.
1. Change the working directory to the GitLab installation:
```shell
cd /opt/gitlab/embedded/service/gitlab-rails
```
1. Install test resources:
```shell
. scripts/data_seeder/test_resources.sh
```
1. Globalize gems:
```shell
/opt/gitlab/embedded/bin/chpst -e /opt/gitlab/etc/gitlab-rails/env /opt/gitlab/embedded/bin/bundle exec ruby scripts/data_seeder/globalize_gems.rb
```
1. Install bundle:
```shell
/opt/gitlab/embedded/bin/chpst -e /opt/gitlab/etc/gitlab-rails/env /opt/gitlab/embedded/bin/bundle
```
1. Seed the data:
```shell
gitlab-rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
```
## Develop
The Data Seeder uses FactoryBot definitions from `spec/factories` which ...
1. Saves time on development
1. Are easy-to-read
1. Are easy to maintain
1. Do not rely on an API that may change in the future
1. Are always up-to-date
1. Executes on the lowest-level possible ([ORM](https://guides.rubyonrails.org/active_record_basics.html#active-record-as-an-orm-framework)) to create data as quickly as possible
> From the [FactoryBot README](https://github.com/thoughtbot/factory_bot#readme_) : `factory_bot` is a fixtures replacement with a straightforward definition syntax, support for multiple build
> strategies (saved instances, unsaved instances, attribute hashes, and stubbed objects), and support for multiple factories for the same class, including factory
> inheritance
Factories reside in `spec/factories/*` and are fixtures for Rails models found in `app/models/*`. For example, For a model named `app/models/issue.rb`, the factory will
be named `spec/factories/issues.rb`. For a model named `app/models/project.rb`, the factory will be named `app/models/projects.rb`.
Three parsers currently exist that the GitLab Data Seeder supports. Ruby, YAML, and JSON.
### Ruby
All Ruby Seeds must define a `DataSeeder` class with a `#seed` instance method. You may structure your Ruby class as you wish. All FactoryBot [methods](https://www.rubydoc.info/gems/factory_bot/FactoryBot/Syntax/Methods) (`create`, `build`, `create_list`) are included in the class automatically and may be called.
The `DataSeeder` class contains the following instance variables defined upon seeding:
- `@seed_file` - The `File` object.
- `@owner` - The owner of the seed data.
- `@name` - The name of the seed. This is the seed file name without the extension.
- `@group` - The root group that all seeded data is created under.
- `@logger` - The logger object to log output. Logging output may be found in `log/data_seeder.log`.
```ruby
# frozen_string_literal: true
class DataSeeder
def seed
my_group = create(:group, name: 'My Group', path: 'my-group-path', parent: @group)
@logger.info "Created #{my_group.name}" #=> Created My Group
my_project = create(:project, :public, name: 'My Project', namespace: my_group, creator: @owner)
end
end
```
### YAML
The YAML Parser is a DSL that supports Factory definitions and allows you to seed data using a human-readable format.
```yaml
name: My Seeder
groups:
- _id: my_group
name: My Group
path: my-group-path
projects:
- _id: my_project
name: My Project
namespace_id: <%= groups.my_group.id %>
creator_id: <%= @owner.id %>
traits:
- public
```
### JSON
The JSON Parser allows you to house seed files in JSON format.
```json
{
"name": "My Seeder",
"groups": [
{ "_id": "my_group", "name": "My Group", "path": "my-group-path" }
],
"projects": [
{
"_id": "my_project",
"name": "My Project",
"namespace_id": "<%= groups.my_group.id %>",
"creator_id": "<%= @owner.id %>",
"traits": ["public"]
}
]
}
```
### Logging
When running the Data Seeder, the default level of logging is set to "information".
You can override the logging level by specifying `GITLAB_LOG_LEVEL=<level>`.
```shell
$ GITLAB_LOG_LEVEL=debug bundle exec rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
Seeding data for Administrator
......
$ GITLAB_LOG_LEVEL=warn bundle exec rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
Seeding data for Administrator
......
$ GITLAB_LOG_LEVEL=error bundle exec rake "ee:gitlab:seed:data_seeder[beautiful_data.rb]"
......
```
### Taxonomy of a Factory
Factories consist of three main parts - the **Name** of the factory, the **Traits** and the **Attributes**.
Given: `create(:iteration, :with_title, :current, title: 'My Iteration')`
|||
|:-|:-|
| **:iteration** | This is the **Name** of the factory. The filename will be the plural form of this **Name** and reside under either `spec/factories/iterations.rb` or `ee/spec/factories/iterations.rb`. |
| **:with_title** | This is a **Trait** of the factory. [See how it's defined](https://gitlab.com/gitlab-org/gitlab/-/blob/9c2a1f98483921dd006d70fdaed316e21fc5652f/ee/spec/factories/iterations.rb#L21-23). |
| **:current** | This is a **Trait** of the factory. [See how it's defined](https://gitlab.com/gitlab-org/gitlab/-/blob/9c2a1f98483921dd006d70fdaed316e21fc5652f/ee/spec/factories/iterations.rb#L29-31). |
| **title: 'My Iteration'** | This is an **Attribute** of the factory that is passed to the Model for creation. |
### Examples
In these examples, you will see an instance variable `@owner`. This is the `root` user (`User.first`).
#### Create a Group
```ruby
my_group = create(:group, name: 'My Group', path: 'my-group-path')
```
#### Create a Project
```ruby
# create a Project belonging to a Group
my_project = create(:project, :public, name: 'My Project', namespace: my_group, creator: @owner)
```
#### Create an Issue
```ruby
# create an Issue belonging to a Project
my_issue = create(:issue, title: 'My Issue', project: my_project, weight: 2)
```
#### Create an Iteration
```ruby
# create an Iteration under a Group
my_iteration = create(:iteration, :with_title, :current, title: 'My Iteration', group: my_group)
```
### Frequently encountered issues
#### Username or email has already been taken
If you see either of these errors:
- `ActiveRecord::RecordInvalid: Validation failed: Email has already been taken`
- `ActiveRecord::RecordInvalid: Validation failed: Username has already been taken`
This happens because, by default, the factories backfill any data that is missing. For instance, when a project
is created, it must have a user who created it. If the owner is not specified, the factory attempts to create one, and the
generated username or email can collide with a user that already exists.
**How to fix**
Check the respective factory to find out which key is required, usually `:author` or `:owner`, and pass an existing user.
```ruby
# This throws ActiveRecord::RecordInvalid
create(:project, name: 'Throws Error', namespace: create(:group, name: 'Some Group'))
# Specify the user where @owner is a [User] record
create(:project, name: 'No longer throws error', owner: @owner, namespace: create(:group, name: 'Some Group'))
create(:epic, group: create(:group), author: @owner)
```
#### `parsing id "my id" as "my_id"`
See [specifying variables](#specify-a-variable).
#### `id is invalid`
Because non-Ruby parsers convert IDs into Ruby objects, IDs must follow the Ruby [naming conventions](https://docs.ruby-lang.org/en/2.0.0/syntax/methods_rdoc.html#label-Method+Names) for method names.
Examples of invalid IDs:
- IDs that start with a number
- IDs that have special characters (`-`, `!`, `$`, `@`, `` ` ``, `=`, `<`, `>`, `;`, `:`)
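For illustration, here are some hypothetical IDs and how they fare under these rules:
```yaml
group_labels:
  - _id: my_label    # valid: starts with a letter and uses only letters, digits, and underscores
  - _id: label_2     # valid: digits are allowed after the first character
  - _id: 2nd_label   # invalid: starts with a number
  - _id: my-label!   # invalid: contains the special characters `-` and `!`
```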
#### `ActiveRecord::AssociationTypeMismatch: Model expected, got ... which is an instance of String`
This is a limitation of the seeder.
See the issue for [allowing parsing of raw Ruby objects](https://gitlab.com/gitlab-org/gitlab/-/issues/403079).
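As an illustration of the limitation, ERB interpolation in a YAML or JSON seed file always produces a string, so only scalar attributes such as IDs can be passed. The example below is hypothetical:
```yaml
projects:
  - _id: my_project
    name: My Project
    # Works: `namespace_id` accepts the interpolated integer ID as a scalar value.
    namespace_id: <%= groups.my_group.id %>
    # Would fail with ActiveRecord::AssociationTypeMismatch, because the ERB
    # result is a String rather than a Group model:
    # namespace: <%= groups.my_group %>
```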
## YAML Factories
### Generator to generate _n_ records
### [Group Labels](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/factories/labels.rb)
```yaml
group_labels:
  # Group Label with Name and a Color
  - name: Group Label 1
    group_id: <%= @group.id %>
    color: "#FF0000"
```
### [Group Milestones](https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/factories/milestones.rb)
```yaml
group_milestones:
  # Past Milestone
  - name: Past Milestone
    group_id: <%= @group.id %>
    group:
    start_date: <%= 1.month.ago %>
    due_date: <%= 1.day.ago %>
  # Ongoing Milestone
  - name: Ongoing Milestone
    group_id: <%= @group.id %>
    group:
    start_date: <%= 1.day.ago %>
    due_date: <%= 1.month.from_now %>
  # Future Milestone
  - name: Future Milestone
    group_id: <%= @group.id %>
    group:
    start_date: <%= 1.month.from_now %>
    due_date: <%= 2.months.from_now %>
```
#### Quirks
- You _must_ specify `group:` and leave it empty. The Milestones factory manipulates this attribute in an `after(:build)` hook. If it is not present, the Milestone cannot be associated properly with the Group.
### [Epics](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/spec/factories/epics.rb)
```yaml
epics:
  # Simple Epic
  - title: Simple Epic
    group_id: <%= @group.id %>
    author_id: <%= @owner.id %>
  # Epic with detailed Markdown description
  - title: Detailed Epic
    group_id: <%= @group.id %>
    author_id: <%= @owner.id %>
    description: |
      # Markdown
      **Description**
  # Epic with dates
  - title: Epic with dates
    group_id: <%= @group.id %>
    author_id: <%= @owner.id %>
    start_date: <%= 1.day.ago %>
    due_date: <%= 1.month.from_now %>
```
## Variables
Each created factory can be assigned an identifier to be used in future seeding.
You can specify an ID for any created factory that you may use later in the seed file.
### Specify a variable
You may pass an `_id` attribute on any factory to refer back to it later in non-Ruby parsers.
Variables are under the factory definitions that they reside in.
```yaml
---
group_labels:
  - _id: my_label #=> group_labels.my_label
projects:
  - _id: my_project #=> projects.my_project
```
The variables defined above are `group_labels.my_label` and `projects.my_project`.
NOTE:
Specifying variables that contain spaces is not advised. If you do, refer back to them with underscores in place of the spaces.
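A short sketch of that behavior, reusing the message format from the warning above: an ID such as `my label` is parsed as `my_label`, and you refer back to it with the underscore form.
```yaml
group_labels:
  # Discouraged: the ID contains a space, so it is parsed as `my_label`
  - _id: my label
    name: My Label
    color: "#FF0000"
  # Refer back to the variable with underscores in place of the spaces
  - name: Another Label
    color: <%= group_labels.my_label.color %>
```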
### Referencing a variable
Given a YAML seed file:
```yaml
---
group_labels:
  - _id: my_group_label #=> group_labels.my_group_label
    name: My Group Label
    color: "#FF0000"
  - _id: my_other_group_label #=> group_labels.my_other_group_label
    color: <%= group_labels.my_group_label.color %>
projects:
  - _id: my_project #=> projects.my_project
    name: My Project
```
When you refer to a variable, it points to the _already seeded_ model. In other words, the model's `id` attribute is
populated.
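For example, because the seeded records are persisted, a later section can reuse their database attributes. This is a hedged sketch: it assumes an `issues` key maps to the `:issue` factory by the same pluralized naming convention the other sections use.
```yaml
issues:
  # Assumption: the `issues` key follows the pluralized factory-name convention
  # used above (groups, projects, group_labels, ...).
  - title: My Issue
    project_id: <%= projects.my_project.id %>
    author_id: <%= @owner.id %>
```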
<!-- This redirect file can be deleted after <2024-06-03>. -->
<!-- Redirects that point to other docs in the same project expire in three months. -->
<!-- Redirects that point to docs in a different project or site (for example, link is not relative and starts with `https:`) expire in one year. -->
<!-- Before deletion, see: https://docs.gitlab.com/ee/development/documentation/redirects.html -->

View File

@ -151,13 +151,14 @@ Updating CA certificates...
Runtime platform arch=amd64 os=linux pid=7 revision=1b659122 version=12.8.0
Running in system-mode.
Please enter the gitlab-ci coordinator URL (for example, https://gitlab.com/):
Enter the GitLab instance URL (for example, https://gitlab.com/):
https://my-host.internal
Please enter the gitlab-ci token for this runner:
Enter the registration token:
XXXXXXXXXXX
Please enter the gitlab-ci description for this runner:
Enter a description for the runner:
[eb18856e13c0]:
Please enter the gitlab-ci tags for this runner (comma separated):
Enter tags for the runner (comma-separated):
Enter optional maintenance note for the runner:
Registering runner... succeeded runner=FSMwkvLZ
Please enter the executor: custom, docker, virtualbox, kubernetes, docker+machine, docker-ssh+machine, docker-ssh, parallels, shell, ssh:

View File

@ -167,6 +167,8 @@ Prerequisites:
## Onboard a GitLab project
> - Minimum required role [changed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/154089/) in GitLab 17.1.
Prerequisites:
- You must have at least the Maintainer role for the project or group the project belongs to.

View File

@ -2,6 +2,7 @@
require "thor"
require "require_all"
require "uri"
# make sure helpers are required first
require_rel "lib/helpers/**/*.rb"

View File

@ -8,21 +8,37 @@ module Gitlab
# Command to check system dependencies
#
class Doctor < Command
TOOLS = %w[docker kind kubectl helm].freeze
TOOLS = {
"docker" => { required: true },
"kind" => { required: true },
"kubectl" => { required: true },
"helm" => { required: true },
"tar" => {
required: false,
msg: ", tar is optional and only required for providing specific helm chart sha"
}
}.freeze
desc "doctor", "Validate presence of all required system dependencies"
def doctor
log "Checking system dependencies", :info, bright: true
missing_tools = TOOLS.filter_map do |tool|
Helpers::Spinner.spin("Checking if #{tool} is installed") do
raise "#{tool} not found in PATH" unless TTY::Which.exist?(tool)
missing_tools = TOOLS.reject do |tool, opts|
exists = TTY::Which.exist?(tool)
Helpers::Spinner.spin("Checking if #{tool} is installed", raise_on_error: opts[:required]) do
raise "#{tool} not found in PATH#{opts[:msg]}" unless exists
end
rescue StandardError
tool
end
return log "All system dependencies are present", :success, bright: true if missing_tools.empty?
exit_with_error "The following system dependencies are missing: #{missing_tools.join(', ')}"
exists
rescue StandardError
exists
end
return log("All system dependencies are present", :success, bright: true) if missing_tools.empty?
optional = missing_tools.reject { |_tool, opt| opt[:required] }.keys
required = missing_tools.keys - optional
log("Following optional system dependecies are missing: #{optional.join(', ')}", :warn) if optional.any?
exit_with_error "Following required system dependencies are missing: #{required.join(', ')}" if required.any?
end
end
end

View File

@ -37,6 +37,10 @@ module Gitlab
desc: "Timeout for deployment",
default: "10m",
type: :string
option :chart_sha,
desc: "Specific sha of GitLab chart repository, latest release version is used by default. " \
"Requires 'tar' executable to be installed.",
type: :string
super(name)
end
@ -100,7 +104,7 @@ module Gitlab
def installation(name, configuration)
Cng::Deployment::Installation.new(
name, configuration: configuration,
**symbolized_options.slice(:namespace, :set, :ci, :gitlab_domain, :timeout)
**symbolized_options.slice(:namespace, :set, :ci, :gitlab_domain, :timeout, :chart_sha)
)
end

View File

@ -14,7 +14,7 @@ module Gitlab
LICENSE_SECRET = "gitlab-license"
def initialize(name, configuration:, namespace:, ci:, gitlab_domain:, timeout:, set: [])
def initialize(name, configuration:, namespace:, ci:, gitlab_domain:, timeout:, set: [], chart_sha: nil)
@name = name
@configuration = configuration
@namespace = namespace
@ -22,6 +22,7 @@ module Gitlab
@gitlab_domain = gitlab_domain
@timeout = timeout
@set = set
@chart_sha = chart_sha
end
# Perform deployment with all the additional setup
@ -29,8 +30,8 @@ module Gitlab
# @return [void]
def create
log("Creating CNG deployment '#{name}'", :info, bright: true)
run_pre_deploy_setup
run_deploy
chart_reference = run_pre_deploy_setup
run_deploy(chart_reference)
run_post_deploy_setup
rescue Helpers::Shell::CommandFailure
exit(1)
@ -45,7 +46,15 @@ module Gitlab
private
attr_reader :name, :configuration, :namespace, :ci, :set, :gitlab_domain, :timeout
attr_reader :name,
:configuration,
:namespace,
:ci,
:set,
:gitlab_domain,
:timeout,
:chart_sha
alias_method :cli_values, :set
# Kubectl client instance
@ -55,6 +64,13 @@ module Gitlab
@kubeclient ||= Kubectl::Client.new(namespace)
end
# Helm client instance
#
# @return [Helm::Client]
def helm
@helm ||= Helm::Client.new
end
# Gitlab license
#
# @return [String]
@ -83,40 +99,41 @@ module Gitlab
}
end
# Execute pre-deployment setup
# Execute pre-deployment setup which consists of:
# * chart setup
# * namespace and license creation
# * optional configuration specific pre-deploy setup
#
# @return [void]
# @return [String] chart reference
def run_pre_deploy_setup
Helpers::Spinner.spin("running pre-deployment setup") do
add_helm_chart
update_helm_chart_repo
chart_reference = helm.add_helm_chart(chart_sha)
create_namespace
create_license
configuration.run_pre_deployment_setup
chart_reference
end
end
# Run helm deployment
#
# @param [String] chart_reference
# @return [void]
def run_deploy
cmd = [
"upgrade",
"--install", name, "gitlab/gitlab",
"--namespace", namespace,
"--timeout", timeout,
"--wait"
]
cmd.push(*component_version_values.flat_map { |v| ["--set", v] }) if ci
cmd.push("--set", cli_values.join(",")) unless cli_values.empty?
cmd.push("--values", "-")
def run_deploy(chart_reference)
args = []
args.push(*component_version_values.flat_map { |v| ["--set", v] }) if ci
args.push("--set", cli_values.join(",")) unless cli_values.empty?
values = DefaultValues.common_values(gitlab_domain)
.deep_merge(license_values)
.deep_merge(configuration.values)
.deep_stringify_keys
.to_yaml
Helpers::Spinner.spin("running helm deployment") { puts run_helm_cmd(cmd, values.to_yaml) }
Helpers::Spinner.spin("running helm deployment") do
helm.upgrade(name, chart_reference, namespace: namespace, timeout: timeout, values: values, args: args)
end
log("Deployment successful and app is available via: #{configuration.gitlab_url}", :success, bright: true)
end
@ -127,26 +144,6 @@ module Gitlab
Helpers::Spinner.spin("running post-deployment setup") { configuration.run_post_deployment_setup }
end
# Add helm chart repo
#
# @return [void]
def add_helm_chart
log("Adding gitlab helm chart", :info)
puts run_helm_cmd(%w[repo add gitlab https://charts.gitlab.io])
rescue Helpers::Shell::CommandFailure => e
return log("helm repo already exists, skipping", :warn) if e.message.include?("already exists")
raise(e)
end
# Update helm chart repo
#
# @return [void]
def update_helm_chart_repo
log("Updating gitlab helm chart repo", :info)
puts run_helm_cmd(%w[repo update gitlab])
end
# Create namespace
#
# @return [void]

View File

@ -0,0 +1,105 @@
# frozen_string_literal: true
require "net/http"
module Gitlab
module Cng
module Helm
class Client
include Helpers::Shell
include Helpers::Output
HELM_CHART_PREFIX = "gitlab"
HELM_CHART = "https://charts.gitlab.io"
HELM_CHART_REPO = "https://gitlab.com/gitlab-org/charts/gitlab"
# Add helm chart and return reference
#
# @param [String] sha fetch and package helm chart using specific repo sha
# @return [String] chart reference or path to packaged chart tgz
def add_helm_chart(sha = nil)
return package_chart(sha) if sha
log("Adding gitlab helm chart '#{HELM_CHART}'", :info)
puts run_helm(%W[repo add #{HELM_CHART_PREFIX} #{HELM_CHART}])
"#{HELM_CHART_PREFIX}/gitlab"
rescue Helpers::Shell::CommandFailure => e
if e.message.include?("already exists")
log("helm chart repo already exists, updating", :warn)
puts(run_helm(%w[repo update gitlab]))
return "#{HELM_CHART_PREFIX}/gitlab"
end
raise(e)
end
# Run helm upgrade command with --install argument
#
# @param [String] name deployment name
# @param [String] chart helm chart reference
# @param [String] namespace deployment namespace
# @param [String] timeout timeout value like 5s, 10m
# @param [String] values yml string with helm values
# @param [Array] args extra arguments to pass to command
# @return [void]
def upgrade(name, chart, namespace:, timeout:, values:, args: [])
log("Upgrading helm release '#{name}' in namespace '#{namespace}'", :info)
puts run_helm([
"upgrade", "--install", name, chart,
"--namespace", namespace,
"--timeout", timeout,
"--values", "-",
"--wait",
*args
], values)
end
private
# Temporary directory for helm chart
#
# @return [String]
def tmp_dir
Helpers::Utils.tmp_dir
end
# Create chart package from specific chart repo sha
#
# @param [String] sha
# @return [String] path to package
def package_chart(sha)
log("Packaging chart for git sha '#{sha}'", :info)
chart_dir = fetch_chart_repo(sha)
puts run_helm(%W[package --dependency-update --destination #{chart_dir} #{chart_dir}])
chart_tar = Dir.glob("#{chart_dir}/gitlab-*.tgz").first
raise "Failed to package chart" unless chart_tar
chart_tar
end
# Download and extract helm chart
#
# @param [String] sha
# @return [String] path to extracted repo
def fetch_chart_repo(sha)
uri = URI("#{HELM_CHART_REPO}/-/archive/#{sha}/gitlab-#{sha}.tar")
res = Net::HTTP.get_response(uri)
raise "Failed to download chart, got response code: #{res.code}" unless res.code == "200"
tar = File.join(tmp_dir, "gitlab-#{sha}.tar").tap { |path| File.write(path, res.body) }
execute_shell(["tar", "-xf", tar, "-C", tmp_dir])
File.join(tmp_dir, "gitlab-#{sha}")
end
# Run helm command
#
# @param [Array] cmd
# @return [String]
def run_helm(cmd, stdin = nil)
execute_shell(["helm", *cmd], stdin_data: stdin)
end
end
end
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Gitlab
module Cng
module Helpers
module Utils
# Global tmp dir for file operations
#
# @return [String]
def self.tmp_dir
@tmp_dir ||= Dir.mktmpdir("cng")
end
end
end
end
end

View File

@ -1,6 +1,5 @@
# frozen_string_literal: true
require "uri"
require "tmpdir"
require "erb"
@ -85,7 +84,7 @@ module Gitlab
# @param [String] config_yml
# @return [String]
def tmp_config_file(config_yml)
File.join(Dir.tmpdir, "kind-config.yml").tap do |path|
File.join(Helpers::Utils.tmp_dir, "kind-config.yml").tap do |path|
File.write(path, config_yml)
end
end

View File

@ -5,14 +5,14 @@ RSpec.describe Gitlab::Cng::Commands::Doctor do
let(:spinner) { instance_double(Gitlab::Cng::Helpers::Spinner) }
let(:command_name) { "doctor" }
let(:tools_present) { true }
let(:tools) { %w[docker kind kubectl helm] }
let(:missing_tools) { [] }
before do
allow(Gitlab::Cng::Helpers::Spinner).to receive(:new) { spinner }
allow(spinner).to receive(:spin).and_yield
tools.each { |tool| allow(TTY::Which).to receive(:exist?).with(tool).and_return(tools_present) }
allow(TTY::Which).to receive(:exist?).and_return(true)
missing_tools.each { |tool| allow(TTY::Which).to receive(:exist?).with(tool).and_return(false) }
end
it "defines a doctor command" do
@ -33,13 +33,23 @@ RSpec.describe Gitlab::Cng::Commands::Doctor do
end
end
context "with missing tools" do
let(:tools_present) { false }
context "with missing optional tool" do
let(:missing_tools) { %w[tar] }
it "exits and prints missing dependencies error", :aggregate_failures do
it "prints missing dependencies warning", :aggregate_failures do
expect do
expect { invoke_command(command_name) }.not_to raise_error
end.to output(/Following optional system dependencies are missing: tar/).to_stdout
end
end
context "with missing required tools" do
let(:missing_tools) { %w[docker kind] }
it "prints missing dependencies error and raises SystemExit", :aggregate_failures do
expect do
expect { invoke_command(command_name) }.to raise_error(SystemExit)
end.to output(/The following system dependencies are missing: #{tools.join(', ')}/).to_stdout
end.to output(/Following required system dependencies are missing: docker, kind/).to_stdout
end
end
end

View File

@ -1,5 +1,6 @@
# frozen_string_literal: true
# rubocop:disable RSpec/MultipleMemoizedHelpers -- allows to reuse many of the mock definitions, with less helpers a lot more value duplication would occur
RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
subject(:installation) do
described_class.new(
@ -8,18 +9,24 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
namespace: "gitlab",
ci: ci,
gitlab_domain: gitlab_domain,
timeout: "10m"
timeout: "10m",
chart_sha: chart_sha
)
end
let(:config_values) { { configuration_specific: true } }
let(:ip) { instance_double(Addrinfo, ipv4_private?: true, ip_address: "127.0.0.1") }
let(:gitlab_domain) { "#{ip.ip_address}.nip.io" }
let(:gitlab_domain) { "127.0.0.1.nip.io" }
let(:chart_sha) { nil }
let(:chart_reference) { "chart-reference" }
let(:kubeclient) do
instance_double(Gitlab::Cng::Kubectl::Client, create_namespace: "", create_resource: "", execute: "")
end
let(:helmclient) do
instance_double(Gitlab::Cng::Helm::Client, add_helm_chart: chart_reference, upgrade: nil)
end
let(:configuration) do
instance_double(
Gitlab::Cng::Deployment::Configurations::Kind,
@ -75,6 +82,7 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
before do
allow(Gitlab::Cng::Helpers::Spinner).to receive(:spin).and_yield
allow(Gitlab::Cng::Kubectl::Client).to receive(:new).with("gitlab").and_return(kubeclient)
allow(Gitlab::Cng::Helm::Client).to receive(:new).and_return(helmclient)
allow(Gitlab::Cng::Deployment::Configurations::Kind).to receive(:new).and_return(configuration)
allow(installation).to receive(:execute_shell)
@ -90,28 +98,14 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
it "runs setup and helm deployment" do
expect { installation.create }.to output(/Creating CNG deployment 'gitlab'/).to_stdout
expect(installation).to have_received(:execute_shell).with(
%w[helm repo add gitlab https://charts.gitlab.io],
stdin_data: nil
)
expect(installation).to have_received(:execute_shell).with(
%w[helm repo add gitlab https://charts.gitlab.io],
stdin_data: nil
)
expect(installation).to have_received(:execute_shell).with(
%w[helm repo update gitlab],
stdin_data: nil
)
expect(installation).to have_received(:execute_shell).with(
%w[
helm upgrade
--install gitlab gitlab/gitlab
--namespace gitlab
--timeout 10m
--wait
--values -
],
stdin_data: values_yml
expect(helmclient).to have_received(:add_helm_chart).with(nil)
expect(helmclient).to have_received(:upgrade).with(
"gitlab",
chart_reference,
namespace: "gitlab",
timeout: "10m",
values: values_yml,
args: []
)
expect(kubeclient).to have_received(:create_namespace)
@ -123,19 +117,21 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
end
end
context "with ci" do
context "with ci and specific sha" do
let(:ci) { true }
let(:chart_sha) { "sha" }
it "runs helm install with correctly merged values and component versions" do
expect { installation.create }.to output(/Creating CNG deployment 'gitlab'/).to_stdout
expect(installation).to have_received(:execute_shell).with(
%W[
helm upgrade
--install gitlab gitlab/gitlab
--namespace gitlab
--timeout 10m
--wait
expect(helmclient).to have_received(:add_helm_chart).with(chart_sha)
expect(helmclient).to have_received(:upgrade).with(
"gitlab",
chart_reference,
namespace: "gitlab",
timeout: "10m",
values: values_yml,
args: %W[
--set gitlab.gitaly.image.repository=registry.gitlab.com/gitlab-org/build/cng-mirror/gitaly
--set gitlab.gitaly.image.tag=7aa06a578d76bdc294ee8e9acb4f063e7d9f1d5f
--set gitlab.gitlab-shell.image.repository=registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-shell
@ -152,10 +148,9 @@ RSpec.describe Gitlab::Cng::Deployment::Installation, :aggregate_failures do
--set gitlab.webservice.image.tag=#{env['CI_COMMIT_SHA']}
--set gitlab.webservice.workhorse.image=registry.gitlab.com/gitlab-org/build/cng-mirror/gitlab-workhorse-ee
--set gitlab.webservice.workhorse.tag=#{env['CI_COMMIT_SHA']}
--values -
],
stdin_data: values_yml
]
)
end
end
end
# rubocop:enable RSpec/MultipleMemoizedHelpers

View File

@ -0,0 +1,105 @@
# frozen_string_literal: true
RSpec.describe Gitlab::Cng::Helm::Client do
subject(:client) { described_class.new }
before do
allow(client).to receive(:execute_shell)
end
describe "#add_helm_chart" do
let(:tmpdir) { Dir.mktmpdir("cng") }
before do
allow(Gitlab::Cng::Helpers::Utils).to receive(:tmp_dir).and_return(tmpdir)
end
context "with default chart" do
it "adds default chart repo" do
expect do
expect(client.add_helm_chart).to eq("gitlab/gitlab")
end.to output(%r{Adding gitlab helm chart 'https://charts.gitlab.io'}).to_stdout
expect(client).to have_received(:execute_shell).with(
%w[helm repo add gitlab https://charts.gitlab.io],
stdin_data: nil
)
end
it "updates chart repo when repository already exists" do
allow(client).to receive(:execute_shell)
.with(%w[helm repo add gitlab https://charts.gitlab.io], stdin_data: nil)
.and_raise(Gitlab::Cng::Helpers::Shell::CommandFailure.new(
"repository name (gitlab) already exists"
))
expect do
expect(client.add_helm_chart).to eq("gitlab/gitlab")
end.to output(/helm chart repo already exists, updating/).to_stdout
end
it "correctly raises error if command fails" do
allow(client).to receive(:execute_shell)
.with(%w[helm repo add gitlab https://charts.gitlab.io], stdin_data: nil)
.and_raise(Gitlab::Cng::Helpers::Shell::CommandFailure.new("something went wrong"))
expect { client.add_helm_chart }.to raise_error("something went wrong")
end
end
context "with specific chart sha" do
let(:sha) { "1888fda881ab" }
let(:chart_dir) { File.join(tmpdir, "gitlab-#{sha}") }
before do
allow(Net::HTTP).to receive(:get_response).with(
URI("https://gitlab.com/gitlab-org/charts/gitlab/-/archive/#{sha}/gitlab-#{sha}.tar")
).and_return(instance_double(Net::HTTPSuccess, body: "archive", code: "200"))
FileUtils.mkdir_p(chart_dir)
File.write(File.join(chart_dir, "gitlab-#{sha}.tgz"), "built chart")
end
it "packages chart from specific sha" do
expect do
expect(client.add_helm_chart(sha)).to eq(File.join(chart_dir, "gitlab-#{sha}.tgz"))
end.to output(/Packaging chart for git sha '#{sha}'/).to_stdout
expect(client).to have_received(:execute_shell).with(
%W[tar -xf #{File.join(tmpdir, "gitlab-#{sha}.tar")} -C #{tmpdir}]
)
expect(client).to have_received(:execute_shell).with(
%W[helm package --dependency-update --destination #{chart_dir} #{chart_dir}],
stdin_data: nil
)
end
end
end
describe "#upgrade" do
let(:values) { { vals: "vals" }.to_yaml }
before do
allow(client).to receive(:execute_shell).and_return("helm upgrade command output")
end
it "runs helm upgrade command" do
expect do
client.upgrade(
"gitlab", "gitlab/gitlab", namespace: "gitlab", timeout: "10m", values: values, args: ["--dry-run"]
)
end.to output(
match(/Upgrading helm release 'gitlab' in namespace 'gitlab'/).and(match(/helm upgrade command output/))
).to_stdout
expect(client).to have_received(:execute_shell).with(%w[
helm upgrade --install gitlab gitlab/gitlab
--namespace gitlab
--timeout 10m
--values -
--wait
--dry-run
], stdin_data: values)
end
end
end

View File

@ -14,11 +14,12 @@ RSpec.describe Gitlab::Cng::Kind::Cluster do
let(:ci) { false }
let(:name) { "gitlab" }
let(:docker_hostname) { nil }
let(:tmp_config_path) { File.join(Dir.tmpdir, "kind-config.yml") }
let(:tmp_config_path) { File.join("/tmp", "kind-config.yml") }
let(:command_status) { instance_double(Process::Status, success?: true) }
let(:clusters) { "kind" }
before do
allow(Gitlab::Cng::Helpers::Utils).to receive(:tmp_dir).and_return("/tmp")
allow(Gitlab::Cng::Helpers::Spinner).to receive(:spin).and_yield
allow(File).to receive(:write).with(tmp_config_path, kind_config_content)

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module BulkImports
class Logger < ::Gitlab::Import::Logger
class Logger < ::Import::Framework::Logger
IMPORTER_NAME = 'gitlab_migration'
# Extract key information from a provided entity and include it in log

View File

@ -2,7 +2,7 @@
module Gitlab
module BitbucketImport
class Logger < ::Gitlab::Import::Logger
class Logger < ::Import::Framework::Logger
def default_attributes
super.merge(import_type: :bitbucket)
end

View File

@ -2,7 +2,7 @@
module Gitlab
module BitbucketServerImport
class Logger < ::Gitlab::Import::Logger
class Logger < ::Import::Framework::Logger
def default_attributes
super.merge(import_type: :bitbucket_server)
end

View File

@ -2,7 +2,7 @@
module Gitlab
module GithubImport
class Logger < ::Gitlab::Import::Logger
class Logger < ::Import::Framework::Logger
def default_attributes
super.merge(import_type: :github)
end

View File

@ -72,7 +72,7 @@ module Gitlab
external_identifiers: external_identifiers
}
Gitlab::Import::Logger.error(
::Import::Framework::Logger.error(
attributes.merge(
message: 'importer failed',
'exception.message': exception.message

View File

@ -71,7 +71,7 @@ module Gitlab
# instead of `allow_object_storage`.
Gitlab::HTTP.get(url, stream_body: true, allow_object_storage: true) do |fragment|
if [301, 302, 303, 307].include?(fragment.code)
Gitlab::Import::Logger.warn(message: "received redirect fragment", fragment_code: fragment.code)
::Import::Framework::Logger.warn(message: "received redirect fragment", fragment_code: fragment.code)
elsif fragment.code == 200
current_size += fragment.bytesize

View File

@ -94,7 +94,7 @@ module Gitlab
nil
end
Gitlab::Import::Logger.info(
::Import::Framework::Logger.info(
message: error,
import_upload_archive_path: @archive_path,
import_upload_archive_size: archive_size

View File

@ -181,7 +181,7 @@ module Gitlab
end
def logger
@logger ||= Gitlab::Import::Logger.build
@logger ||= ::Import::Framework::Logger.build
end
end
end

View File

@ -43,7 +43,7 @@ module Gitlab
@project.repository.create_branch(@merge_request.source_branch, @diff_head_sha)
end
rescue StandardError => err
Gitlab::Import::Logger.warn(
::Import::Framework::Logger.warn(
message: 'Import warning: Failed to create source branch',
source_branch: @merge_request.source_branch,
diff_head_sha: @diff_head_sha,
@ -56,7 +56,7 @@ module Gitlab
def create_target_branch
@project.repository.create_branch(@merge_request.target_branch, @merge_request.target_branch_sha)
rescue StandardError => err
Gitlab::Import::Logger.warn(
::Import::Framework::Logger.warn(
message: 'Import warning: Failed to create target branch',
target_branch: @merge_request.target_branch,
diff_head_sha: @diff_head_sha,

View File

@ -30,7 +30,7 @@ module Gitlab
def initialize(exportable)
@exportable = exportable
@errors = []
@logger = Gitlab::Import::Logger.build
@logger = ::Import::Framework::Logger.build
end
def active_export_count

View File

@ -36,7 +36,7 @@ module Gitlab
def different_version?(version)
Gitlab::VersionInfo.parse(version) != Gitlab::VersionInfo.parse(Gitlab::ImportExport.version)
rescue StandardError => e
Gitlab::Import::Logger.error(
::Import::Framework::Logger.error(
message: 'Import error',
error: e.message
)

View File

@ -83,7 +83,7 @@ module Gitlab
end
def logger
@logger ||= Gitlab::Import::Logger.build
@logger ||= ::Import::Framework::Logger.build
end
end
end

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module Gitlab
module Import
module Import
module Framework
class Logger < ::Gitlab::JsonLogger
def self.file_name_noext
'importer'

View File

@ -21,12 +21,12 @@ namespace :tw do
CODE_OWNER_RULES = [
# CodeOwnerRule.new('Activation', ''),
# CodeOwnerRule.new('Acquisition', ''),
CodeOwnerRule.new('AI Framework', '@sselhorn'),
CodeOwnerRule.new('AI Model Validation', '@sselhorn'),
CodeOwnerRule.new('AI Framework', '@sselhorn @jglassman1'),
CodeOwnerRule.new('AI Model Validation', '@sselhorn @jglassman1'),
# CodeOwnerRule.new('Analytics Instrumentation', ''),
# CodeOwnerRule.new('Anti-Abuse', ''),
CodeOwnerRule.new('Authentication', '@jglassman1'),
CodeOwnerRule.new('Authorization', '@jglassman1'),
# CodeOwnerRule.new('Authorization', ''),
# CodeOwnerRule.new('Billing and Subscription Management', ''),
CodeOwnerRule.new('Cloud Connector', '@jglassman1'),
CodeOwnerRule.new('Code Creation', '@jglassman1'),
@ -35,14 +35,14 @@ namespace :tw do
CodeOwnerRule.new('Composition Analysis', '@rdickenson @phillipwells'),
CodeOwnerRule.new('Container Registry', '@marcel.amirault'),
CodeOwnerRule.new('Contributor Experience', '@eread'),
CodeOwnerRule.new('Custom Models', '@sselhorn'),
CodeOwnerRule.new('Custom Models', '@sselhorn @jglassman1'),
# CodeOwnerRule.new('Database', ''),
CodeOwnerRule.new('DataOps', '@sselhorn'),
CodeOwnerRule.new('DataOps', '@sselhorn @jglassman1'),
# CodeOwnerRule.new('Delivery', ''),
CodeOwnerRule.new('Distribution', '@axil'),
CodeOwnerRule.new('Distribution (Charts)', '@axil'),
CodeOwnerRule.new('Distribution (Omnibus)', '@eread'),
CodeOwnerRule.new('Duo Chat', '@sselhorn'),
CodeOwnerRule.new('Duo Chat', '@sselhorn @jglassman1'),
CodeOwnerRule.new('Dynamic Analysis', '@rdickenson @phillipwells'),
CodeOwnerRule.new('Editor Extensions', '@aqualls'),
CodeOwnerRule.new('Environments', '@phillipwells'),
@ -57,7 +57,7 @@ namespace :tw do
CodeOwnerRule.new('Import and Integrate', '@eread @ashrafkhamis'),
CodeOwnerRule.new('Infrastructure', '@sselhorn'),
# CodeOwnerRule.new('Knowledge', ''),
CodeOwnerRule.new('MLOps', '@sselhorn'),
CodeOwnerRule.new('MLOps', '@sselhorn @jglassman1'),
# CodeOwnerRule.new('Observability', ''),
CodeOwnerRule.new('Optimize', '@lciutacu'),
CodeOwnerRule.new('Organization', '@lciutacu'),

View File

@ -28,7 +28,7 @@ module QA
context 'when user adds a new file' do
let(:file_name) { 'first_file.txt' }
it 'shows successfully added and visible in project',
it 'shows successfully added and visible in project', :blocking,
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/432898' do
Page::Project::WebIDE::VSCode.perform do |ide|
ide.create_new_file(file_name)

View File

@ -76,7 +76,7 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures, featur
}
)
expect(Gitlab::Import::Logger)
expect(::Import::Framework::Logger)
.to receive(:error)
.with(
{
@ -120,7 +120,7 @@ RSpec.describe Gitlab::Import::ImportFailureService, :aggregate_failures, featur
}
)
expect(Gitlab::Import::Logger)
expect(::Import::Framework::Logger)
.to receive(:error)
.with(
{

View File

@ -176,7 +176,7 @@ RSpec.describe Gitlab::ImportExport::CommandLineUtil, feature_category: :importe
let(:status) { HTTP::Status.const_get(code, false) }
it 'logs the redirect' do
expect(Gitlab::Import::Logger).to receive(:warn)
expect(::Import::Framework::Logger).to receive(:warn)
Tempfile.create('test') do |file|
subject.download(url, file.path)

View File

@ -40,7 +40,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c
end
it 'logs error message returns false' do
expect(Gitlab::Import::Logger)
expect(::Import::Framework::Logger)
.to receive(:info)
.with(
import_upload_archive_path: filepath,
@ -59,7 +59,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c
end
it 'is valid and does not log error message' do
expect(Gitlab::Import::Logger)
expect(::Import::Framework::Logger)
.not_to receive(:info)
.with(
import_upload_archive_path: filepath,
@ -84,7 +84,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c
end
it 'logs raised exception and terminates validator process group' do
expect(Gitlab::Import::Logger)
expect(::Import::Framework::Logger)
.to receive(:info)
.with(
import_upload_archive_path: filepath,
@ -115,7 +115,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator, feature_c
let(:filesize) { nil }
before do
expect(Gitlab::Import::Logger)
expect(::Import::Framework::Logger)
.to receive(:info)
.with(
import_upload_archive_path: filepath,

View File

@ -118,7 +118,7 @@ RSpec.describe Gitlab::ImportExport::MembersMapper do
end
context 'logging' do
let(:logger) { Gitlab::Import::Logger.build }
let(:logger) { ::Import::Framework::Logger.build }
before do
allow(logger).to receive(:info)

View File

@ -70,7 +70,7 @@ RSpec.describe Gitlab::ImportExport::MergeRequestParser, feature_category: :impo
it 'logs the error' do
allow(project.repository).to receive(:create_branch).and_raise(StandardError, 'Error!')
expect(Gitlab::Import::Logger).to receive(:warn).with(
expect(::Import::Framework::Logger).to receive(:warn).with(
message: 'Import warning: Failed to create target branch',
target_branch: merge_request.target_branch,
diff_head_sha: anything,

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Import::Logger do
RSpec.describe ::Import::Framework::Logger, feature_category: :importers do
subject { described_class.new('/dev/null') }
it_behaves_like 'a json logger', { 'feature_category' => 'importers' }

View File

@ -85,7 +85,7 @@ RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
it 'logs error when update column fails' do
allow(import_state).to receive(:update_column).and_raise(ActiveRecord::ActiveRecordError)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect_next_instance_of(::Import::Framework::Logger) do |logger|
expect(logger).to receive(:error).with(
{
error: 'ActiveRecord::ActiveRecordError',

View File

@ -64,14 +64,14 @@ RSpec.describe Groups::ImportExport::ImportService, feature_category: :importers
let(:group) { create(:group) }
let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
let(:import_logger) { instance_double(::Import::Framework::Logger) }
subject(:service) { described_class.new(group: group, user: user) }
before do
ImportExportUpload.create!(group: group, import_file: import_file)
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
allow(::Import::Framework::Logger).to receive(:build).and_return(import_logger)
allow(import_logger).to receive(:error)
allow(import_logger).to receive(:info)
allow(import_logger).to receive(:warn)
@ -181,7 +181,7 @@ RSpec.describe Groups::ImportExport::ImportService, feature_category: :importers
end
it 'logs the import success' do
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
allow(::Import::Framework::Logger).to receive(:build).and_return(import_logger)
expect(import_logger).to receive(:info).with(
group_id: group.id,
@ -207,7 +207,7 @@ RSpec.describe Groups::ImportExport::ImportService, feature_category: :importers
let(:group) { create(:group) }
let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
let(:import_logger) { instance_double(::Import::Framework::Logger) }
subject(:service) { described_class.new(group: group, user: user) }
@ -215,7 +215,7 @@ RSpec.describe Groups::ImportExport::ImportService, feature_category: :importers
group.add_owner(user)
ImportExportUpload.create!(group: group, import_file: import_file)
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
allow(::Import::Framework::Logger).to receive(:build).and_return(import_logger)
allow(import_logger).to receive(:error)
allow(import_logger).to receive(:warn)
allow(import_logger).to receive(:info)

View File

@ -143,7 +143,7 @@ RSpec.describe Import::BitbucketServerService, feature_category: :importers do
allow(client).to receive(:repo).and_raise(exception)
expect(Gitlab::Import::Logger).not_to receive(:error)
expect(::Import::Framework::Logger).not_to receive(:error)
expect { subject.execute(credentials) }.to raise_error(exception)
end

View File

@ -186,7 +186,7 @@ RSpec.describe Import::BitbucketService, feature_category: :importers do
allow(client).to receive(:user).and_return(bitbucket_user)
end
expect(Gitlab::Import::Logger)
expect(::Import::Framework::Logger)
.to receive(:error)
.with(
message: 'BitBucket Cloud import failed',

View File

@ -46,7 +46,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
end
it 'logs the original error' do
expect(Gitlab::Import::Logger).to receive(:error).with({
expect(::Import::Framework::Logger).to receive(:error).with({
message: 'Import failed because of a GitHub error',
status: 404,
error: 'Not Found'
@ -71,7 +71,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(client).to receive_message_chain(:octokit, :repository).and_raise(exception)
expect(Gitlab::Import::Logger).not_to receive(:error)
expect(::Import::Framework::Logger).not_to receive(:error)
expect { subject.execute(access_params, :github) }.to raise_error(exception)
end
@ -278,7 +278,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
it 'rescues and logs the error' do
allow(client).to receive_message_chain(:octokit, :repository).and_raise(exception)
expect(Gitlab::Import::Logger).to receive(:error).with({
expect(::Import::Framework::Logger).to receive(:error).with({
message: 'Import failed because of a GitHub error',
status: 500,
error: 'Internal Server Error'
@ -355,7 +355,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
it 'returns and logs an error' do
allow(github_importer).to receive(:url).and_return(url)
expect(Gitlab::Import::Logger).to receive(:error).with({
expect(::Import::Framework::Logger).to receive(:error).with({
message: message,
error: error
}).and_call_original

View File

@ -9,7 +9,7 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
let(:tmp_import_export_folder) { 'tmp/gitlab_exports' }
before do
allow_next_instance_of(Gitlab::Import::Logger) do |logger|
allow_next_instance_of(::Import::Framework::Logger) do |logger|
allow(logger).to receive(:info)
end
end
@ -28,7 +28,7 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
context 'when the import/export tmp storage directory exists' do
shared_examples 'removes old tmp files' do |subdir|
it 'removes old files and logs' do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect_next_instance_of(::Import::Framework::Logger) do |logger|
expect(logger)
.to receive(:info)
.with(
@ -41,7 +41,7 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
end
it 'does not remove new files or logs' do
expect(Gitlab::Import::Logger).not_to receive(:new)
expect(::Import::Framework::Logger).not_to receive(:new)
validate_cleanup(subdir: subdir, mtime: 2.hours.ago, expected: true)
end
@ -59,7 +59,7 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
)
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect_next_instance_of(::Import::Framework::Logger) do |logger|
expect(logger)
.to receive(:info)
.with(
@ -81,7 +81,7 @@ RSpec.describe ImportExportCleanUpService, feature_category: :importers do
export_file: fixture_file_upload('spec/fixtures/project_export.tar.gz')
)
expect(Gitlab::Import::Logger).not_to receive(:new)
expect(::Import::Framework::Logger).not_to receive(:new)
expect { service.execute }.not_to change { upload.reload.export_file.file.nil? }

View File

@ -66,7 +66,7 @@ RSpec.describe Projects::LfsPointers::LfsLinkService, feature_category: :source_
expect(Gitlab::Metrics::Lfs).to receive_message_chain(:validate_link_objects_error_rate, :increment).with(
error: false, labels: {})
expect(Gitlab::Import::Logger).to receive(:info).with(
expect(::Import::Framework::Logger).to receive(:info).with(
class: described_class.name,
project_id: project.id,
project_path: project.full_path,

View File

@ -154,7 +154,7 @@ RSpec.describe Gitlab::GithubImport::ObjectImporter, :aggregate_failures, featur
}
)
expect(Gitlab::Import::Logger)
expect(::Import::Framework::Logger)
.to receive(:error)
.with(
{

View File

@ -78,7 +78,7 @@ RSpec.describe Projects::AfterImportWorker, feature_category: :importers do
expect(instance).to receive(:execute).and_raise(exception)
end
expect(Gitlab::Import::Logger).to receive(:info).with(
expect(::Import::Framework::Logger).to receive(:info).with(
{
message: 'Project housekeeping failed',
project_full_path: project.full_path,