Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
f62c9f693f
commit
20ddcb963c
|
|
@ -1,5 +0,0 @@
|
|||
---
|
||||
# Cop supports --autocorrect.
|
||||
Style/RedundantParentheses:
|
||||
Exclude:
|
||||
- 'lib/gitlab/database/tables_truncate.rb'
|
||||
|
|
@ -19,6 +19,7 @@ module Types
|
|||
description: 'Value of the variable.'
|
||||
|
||||
field :value_options, [GraphQL::Types::String],
|
||||
hash_key: :options,
|
||||
null: true,
|
||||
description: 'Value options for the variable.'
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1790,7 +1790,11 @@ class User < ApplicationRecord
|
|||
|
||||
def notification_email_for(notification_group)
|
||||
# Return group-specific email address if present, otherwise return global notification email address
|
||||
notification_group&.notification_email_for(self) || notification_email_or_default
|
||||
group_email = if notification_group && notification_group.respond_to?(:notification_email_for)
|
||||
notification_group.notification_email_for(self)
|
||||
end
|
||||
|
||||
group_email || notification_email_or_default
|
||||
end
|
||||
|
||||
def notification_settings_for(source, inherit: false)
|
||||
|
|
|
|||
|
|
@ -1,155 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Clusters
|
||||
module Applications
|
||||
class PrometheusConfigService
|
||||
def initialize(project, cluster, app)
|
||||
@project = project
|
||||
@cluster = cluster
|
||||
@app = app
|
||||
end
|
||||
|
||||
def execute(config = {})
|
||||
if has_alerts?
|
||||
generate_alert_manager(config)
|
||||
else
|
||||
reset_alert_manager(config)
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :project, :cluster, :app
|
||||
|
||||
def reset_alert_manager(config)
|
||||
config = set_alert_manager_enabled(config, false)
|
||||
config.delete('alertmanagerFiles')
|
||||
config['serverFiles'] ||= {}
|
||||
config['serverFiles']['alerts'] = {}
|
||||
|
||||
config
|
||||
end
|
||||
|
||||
def generate_alert_manager(config)
|
||||
config = set_alert_manager_enabled(config, true)
|
||||
config = set_alert_manager_files(config)
|
||||
|
||||
set_alert_manager_groups(config)
|
||||
end
|
||||
|
||||
def set_alert_manager_enabled(config, enabled)
|
||||
config['alertmanager'] ||= {}
|
||||
config['alertmanager']['enabled'] = enabled
|
||||
|
||||
config
|
||||
end
|
||||
|
||||
def set_alert_manager_files(config)
|
||||
config['alertmanagerFiles'] = {
|
||||
'alertmanager.yml' => {
|
||||
'receivers' => alert_manager_receivers_params,
|
||||
'route' => alert_manager_route_params
|
||||
}
|
||||
}
|
||||
|
||||
config
|
||||
end
|
||||
|
||||
def set_alert_manager_groups(config)
|
||||
config['serverFiles'] ||= {}
|
||||
config['serverFiles']['alerts'] ||= {}
|
||||
config['serverFiles']['alerts']['groups'] ||= []
|
||||
|
||||
environments_with_alerts.each do |env_name, alerts|
|
||||
index = config['serverFiles']['alerts']['groups'].find_index do |group|
|
||||
group['name'] == env_name
|
||||
end
|
||||
|
||||
if index
|
||||
config['serverFiles']['alerts']['groups'][index]['rules'] = alerts
|
||||
else
|
||||
config['serverFiles']['alerts']['groups'] << {
|
||||
'name' => env_name,
|
||||
'rules' => alerts
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
config
|
||||
end
|
||||
|
||||
def alert_manager_receivers_params
|
||||
[
|
||||
{
|
||||
'name' => 'gitlab',
|
||||
'webhook_configs' => [
|
||||
{
|
||||
'url' => notify_url,
|
||||
'send_resolved' => true,
|
||||
'http_config' => {
|
||||
'bearer_token' => alert_manager_token
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
def alert_manager_token
|
||||
app.alert_manager_token
|
||||
end
|
||||
|
||||
def alert_manager_route_params
|
||||
{
|
||||
'receiver' => 'gitlab',
|
||||
'group_wait' => '30s',
|
||||
'group_interval' => '5m',
|
||||
'repeat_interval' => '4h'
|
||||
}
|
||||
end
|
||||
|
||||
def notify_url
|
||||
::Gitlab::Routing.url_helpers
|
||||
.notify_project_prometheus_alerts_url(project, format: :json)
|
||||
end
|
||||
|
||||
def has_alerts?
|
||||
environments_with_alerts.values.flatten(1).any?
|
||||
end
|
||||
|
||||
def environments_with_alerts
|
||||
@environments_with_alerts ||=
|
||||
environments.each_with_object({}) do |environment, hash|
|
||||
name = rule_name(environment)
|
||||
hash[name] = alerts(environment)
|
||||
end
|
||||
end
|
||||
|
||||
def rule_name(environment)
|
||||
"#{environment.name}.rules"
|
||||
end
|
||||
|
||||
def alerts(environment)
|
||||
alerts = Projects::Prometheus::AlertsFinder
|
||||
.new(environment: environment)
|
||||
.execute
|
||||
|
||||
alerts.map do |alert|
|
||||
hash = alert.to_param
|
||||
hash['expr'] = substitute_query_variables(hash['expr'], environment)
|
||||
hash
|
||||
end
|
||||
end
|
||||
|
||||
def substitute_query_variables(query, environment)
|
||||
result = ::Prometheus::ProxyVariableSubstitutionService.new(environment, query: query).execute
|
||||
|
||||
result[:params][:query]
|
||||
end
|
||||
|
||||
def environments
|
||||
project.environments_for_scope(cluster.environment_scope)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -142,12 +142,16 @@ class PostReceive
|
|||
def emit_snowplow_event(project, user)
|
||||
return unless Feature.enabled?(:route_hll_to_snowplow_phase2, project.namespace)
|
||||
|
||||
metric_path = 'counts.source_code_pushes'
|
||||
Gitlab::Tracking.event(
|
||||
'PostReceive',
|
||||
'source_code_pushes',
|
||||
:push,
|
||||
project: project,
|
||||
namespace: project.namespace,
|
||||
user: user
|
||||
user: user,
|
||||
property: 'source_code_pushes',
|
||||
label: metric_path,
|
||||
context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: metric_path).to_context]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,26 +0,0 @@
|
|||
---
|
||||
description: Merge request approvals
|
||||
category: merge_requests
|
||||
action: i_code_review_user_approve_mr
|
||||
label_description:
|
||||
property_description:
|
||||
value_description:
|
||||
extra_properties:
|
||||
identifiers:
|
||||
- project
|
||||
- user
|
||||
- namespace
|
||||
product_section: 'TBD'
|
||||
product_stage: create
|
||||
product_group: code_review
|
||||
product_category: code_review
|
||||
milestone: "15.2"
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/91493
|
||||
distributions:
|
||||
- ce
|
||||
- ee
|
||||
tiers:
|
||||
- free
|
||||
- premium
|
||||
- ultimate
|
||||
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
---
|
||||
description: All events of Git push operations
|
||||
category: PostReceive
|
||||
action: source_code_pushes
|
||||
label_description:
|
||||
property_description:
|
||||
value_description:
|
||||
extra_properties:
|
||||
identifiers:
|
||||
- project
|
||||
- user
|
||||
- namespace
|
||||
product_section: dev
|
||||
product_stage: create
|
||||
product_group: source_code
|
||||
product_category: source_code_management
|
||||
milestone: "15.2"
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/91605
|
||||
distributions:
|
||||
- ce
|
||||
- ee
|
||||
tiers:
|
||||
- free
|
||||
- premium
|
||||
- ultimate
|
||||
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
---
|
||||
description: Mirrored Redis source_code_pushes events sent to Snowplow
|
||||
category: PostReceive
|
||||
action: push
|
||||
identifiers:
|
||||
- project
|
||||
- user
|
||||
- namespace
|
||||
product_section: dev
|
||||
product_stage: create
|
||||
product_group: source_code
|
||||
product_category: source_code_management
|
||||
milestone: "15.7"
|
||||
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104670
|
||||
distributions:
|
||||
- ce
|
||||
- ee
|
||||
tiers:
|
||||
- free
|
||||
- premium
|
||||
- ultimate
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
description: Mirrored RedisHLL i_code_review_user_approve_mr_monthly events sent to Snowplow
|
||||
category: Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter
|
||||
action: approve
|
||||
identifiers:
|
||||
- project
|
||||
- user
|
||||
- namespace
|
||||
product_stage: create
|
||||
product_group: code_review
|
||||
product_category: code_review
|
||||
product_section: 'TBD'
|
||||
milestone: "15.7"
|
||||
introduced_by_url: "https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104670"
|
||||
distributions:
|
||||
- ce
|
||||
- ee
|
||||
tiers:
|
||||
- free
|
||||
- premium
|
||||
- ultimate
|
||||
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class FinalizeGroupMemberNamespaceIdMigration < Gitlab::Database::Migration[2.0]
|
||||
MIGRATION = 'BackfillMemberNamespaceForGroupMembers'
|
||||
disable_ddl_transaction!
|
||||
|
||||
restrict_gitlab_migration gitlab_schema: :gitlab_main
|
||||
|
||||
def up
|
||||
ensure_batched_background_migration_is_finished(
|
||||
job_class_name: MIGRATION,
|
||||
table_name: :members,
|
||||
column_name: :id,
|
||||
job_arguments: [],
|
||||
finalize: true
|
||||
)
|
||||
end
|
||||
|
||||
def down
|
||||
# no-op
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1 @@
|
|||
781ed5eaf05091e5d01ec23a9f66f3722c41b4a87ecdabe48158ce82c5cbb325
|
||||
|
|
@ -35,23 +35,23 @@ or trying to evaluate Geo for a future clusterized installation.
|
|||
A single instance can be expanded to a clusterized version using Patroni, which is recommended for a
|
||||
highly available architecture.
|
||||
|
||||
Follow below the instructions on how to set up PostgreSQL replication as a single instance database.
|
||||
Follow the instructions below on how to set up PostgreSQL replication as a single instance database.
|
||||
Alternatively, you can look at the [Multi-node database replication](#multi-node-database-replication)
|
||||
instructions on setting up replication with a Patroni cluster.
|
||||
|
||||
### PostgreSQL replication
|
||||
|
||||
The GitLab **primary** site where the write operations happen connects to
|
||||
the **primary** database server, and **secondary** sites
|
||||
the **primary** database server. **Secondary** sites
|
||||
connect to their own database servers (which are read-only).
|
||||
|
||||
We recommend using [PostgreSQL replication slots](https://medium.com/@tk512/replication-slots-in-postgresql-b4b03d277c75)
|
||||
You should use [PostgreSQL's replication slots](https://medium.com/@tk512/replication-slots-in-postgresql-b4b03d277c75)
|
||||
to ensure that the **primary** site retains all the data necessary for the **secondary** sites to
|
||||
recover. See below for more details.
|
||||
|
||||
The following guide assumes that:
|
||||
|
||||
- You are using Omnibus and therefore you are using PostgreSQL 12 or later
|
||||
- You are using Omnibus and therefore you are using PostgreSQL 12 or later,
|
||||
which includes the [`pg_basebackup` tool](https://www.postgresql.org/docs/12/app-pgbasebackup.html).
|
||||
- You have a **primary** site already set up (the GitLab server you are
|
||||
replicating from), running Omnibus' PostgreSQL (or equivalent version), and
|
||||
|
|
@ -120,8 +120,8 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
|
||||
1. Define a password for the database [replication user](https://wiki.postgresql.org/wiki/Streaming_Replication).
|
||||
|
||||
We will use the username defined in `/etc/gitlab/gitlab.rb` under the `postgresql['sql_replication_user']`
|
||||
setting. The default value is `gitlab_replicator`, but if you changed it to something else, adapt
|
||||
Use the username defined in `/etc/gitlab/gitlab.rb` under the `postgresql['sql_replication_user']`
|
||||
setting. The default value is `gitlab_replicator`. If you changed the username to something else, adapt
|
||||
the instructions below.
|
||||
|
||||
Generate a MD5 hash of the desired password:
|
||||
|
|
@ -141,7 +141,7 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
```
|
||||
|
||||
If you are using an external database not managed by Omnibus GitLab, you need
|
||||
to create the replicator user and define a password to it manually:
|
||||
to create the `gitlab_replicator` user and define a password for that user manually:
|
||||
|
||||
```sql
|
||||
--- Create a new user 'replicator'
|
||||
|
|
@ -155,16 +155,16 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
|
||||
For security reasons, PostgreSQL does not listen on any network interfaces
|
||||
by default. However, Geo requires the **secondary** site to be able to
|
||||
connect to the **primary** site's database. For this reason, we need the IP address of
|
||||
connect to the **primary** site's database. For this reason, you need the IP address of
|
||||
each site.
|
||||
|
||||
NOTE:
|
||||
For external PostgreSQL instances, see [additional instructions](external_database.md).
|
||||
|
||||
If you are using a cloud provider, you can lookup the addresses for each
|
||||
If you are using a cloud provider, you can look up the addresses for each
|
||||
Geo site through your cloud provider's management console.
|
||||
|
||||
To lookup the address of a Geo site, SSH in to the Geo site and execute:
|
||||
To look up the address of a Geo site, SSH into the Geo site and execute:
|
||||
|
||||
```shell
|
||||
##
|
||||
|
|
@ -187,7 +187,7 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
| `postgresql['md5_auth_cidr_addresses']` | **Primary** and **Secondary** sites' public or VPC private addresses. |
|
||||
|
||||
If you are using Google Cloud Platform, SoftLayer, or any other vendor that
|
||||
provides a virtual private cloud (VPC) you can use the **primary** and **secondary** sites
|
||||
provides a virtual private cloud (VPC), you can use the **primary** and **secondary** sites'
|
||||
private addresses (corresponds to "internal address" for Google Cloud Platform) for
|
||||
`postgresql['md5_auth_cidr_addresses']` and `postgresql['listen_address']`.
|
||||
|
||||
|
|
@ -200,10 +200,10 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
`127.0.0.1/32` to the `postgresql['md5_auth_cidr_addresses']` setting, to allow Rails to connect through
|
||||
`127.0.0.1`. For more information, see [omnibus-5258](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5258).
|
||||
|
||||
Depending on your network configuration, the suggested addresses may not
|
||||
be correct. If your **primary** site and **secondary** sites connect over a local
|
||||
Depending on your network configuration, the suggested addresses may
|
||||
be incorrect. If your **primary** site and **secondary** sites connect over a local
|
||||
area network, or a virtual network connecting availability zones like
|
||||
[Amazon's VPC](https://aws.amazon.com/vpc/) or [Google's VPC](https://cloud.google.com/vpc/)
|
||||
[Amazon's VPC](https://aws.amazon.com/vpc/) or [Google's VPC](https://cloud.google.com/vpc/),
|
||||
you should use the **secondary** site's private address for `postgresql['md5_auth_cidr_addresses']`.
|
||||
|
||||
Edit `/etc/gitlab/gitlab.rb` and add the following, replacing the IP
|
||||
|
|
@ -286,12 +286,12 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
```
|
||||
|
||||
1. Now that the PostgreSQL server is set up to accept remote connections, run
|
||||
`netstat -plnt | grep 5432` to make sure that PostgreSQL is listening on port
|
||||
`netstat -plnt | grep 5432` to ensure that PostgreSQL is listening on port
|
||||
`5432` to the **primary** site's private address.
|
||||
|
||||
1. A certificate was automatically generated when GitLab was reconfigured. This
|
||||
is used automatically to protect your PostgreSQL traffic from
|
||||
eavesdroppers, but to protect against active ("man-in-the-middle") attackers,
|
||||
eavesdroppers. To protect against active ("man-in-the-middle") attackers,
|
||||
the **secondary** site needs a copy of the certificate. Make a copy of the PostgreSQL
|
||||
`server.crt` file on the **primary** site by running this command:
|
||||
|
||||
|
|
@ -299,26 +299,26 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
cat ~gitlab-psql/data/server.crt
|
||||
```
|
||||
|
||||
Copy the output into a clipboard or into a local file. You
|
||||
Copy the output to the clipboard or into a local file. You
|
||||
need it when setting up the **secondary** site! The certificate is not sensitive
|
||||
data.
|
||||
|
||||
However, this certificate is created with a generic `PostgreSQL` Common Name. For this,
|
||||
you must use the `verify-ca` mode when replicating the database, otherwise,
|
||||
the hostname mismatch will cause errors.
|
||||
the hostname mismatch causes errors.
|
||||
|
||||
1. Optional. Generate your own SSL certificate and manually
|
||||
[configure SSL for PostgreSQL](https://docs.gitlab.com/omnibus/settings/database.html#configuring-ssl),
|
||||
instead of using the generated certificate.
|
||||
|
||||
You will need at least the SSL certificate and key, and set the `postgresql['ssl_cert_file']` and
|
||||
You need at least the SSL certificate and key. Set the `postgresql['ssl_cert_file']` and
|
||||
`postgresql['ssl_key_file']` values to their full paths, as per the Database SSL docs.
|
||||
|
||||
This allows you to use the `verify-full` SSL mode when replicating the database
|
||||
and get the extra benefit of verifying the full hostname in the CN.
|
||||
|
||||
You can use this certificate (that you have also set in `postgresql['ssl_cert_file']`) instead
|
||||
of the certificate from the point above going forward. This will allow you to use `verify-full`
|
||||
of the certificate from the point above going forward. This allows you to use `verify-full`
|
||||
without replication errors if the CN matches.
|
||||
|
||||
#### Step 2. Configure the **secondary** server
|
||||
|
|
@ -337,7 +337,7 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
```
|
||||
|
||||
NOTE:
|
||||
This step is important so we don't try to execute anything before the site is fully configured.
|
||||
This step is important so you don't try to execute anything before the site is fully configured.
|
||||
|
||||
1. [Check TCP connectivity](../../raketasks/maintenance.md) to the **primary** site's PostgreSQL server:
|
||||
|
||||
|
|
@ -348,7 +348,7 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
NOTE:
|
||||
If this step fails, you may be using the wrong IP address, or a firewall may
|
||||
be preventing access to the site. Check the IP address, paying close
|
||||
attention to the difference between public and private addresses and ensure
|
||||
attention to the difference between public and private addresses. Ensure
|
||||
that, if a firewall is present, the **secondary** site is permitted to connect to the
|
||||
**primary** site on port 5432.
|
||||
|
||||
|
|
@ -389,14 +389,14 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
```
|
||||
|
||||
NOTE:
|
||||
If you are using manually generated certificates and plan on using
|
||||
`sslmode=verify-full` to benefit of the full hostname verification,
|
||||
make sure to replace `verify-ca` to `verify-full` when
|
||||
If you are using manually generated certificates and want to use
|
||||
`sslmode=verify-full` to benefit from the full hostname verification,
|
||||
replace `verify-ca` with `verify-full` when
|
||||
running the command.
|
||||
|
||||
When prompted enter the _plaintext_ password you set in the first step for the
|
||||
When prompted, enter the _plaintext_ password you set in the first step for the
|
||||
`gitlab_replicator` user. If all worked correctly, you should see
|
||||
the list of **primary** site's databases.
|
||||
the list of the **primary** site's databases.
|
||||
|
||||
A failure to connect here indicates that the TLS configuration is incorrect.
|
||||
Ensure that the contents of `~gitlab-psql/data/server.crt` on the **primary** site
|
||||
|
|
@ -404,8 +404,8 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
|
||||
1. Configure PostgreSQL:
|
||||
|
||||
This step is similar to how we configured the **primary** instance.
|
||||
We must enable this, even if using a single node.
|
||||
This step is similar to how you configured the **primary** instance.
|
||||
You must enable this, even if using a single node.
|
||||
|
||||
Edit `/etc/gitlab/gitlab.rb` and add the following, replacing the IP
|
||||
addresses with addresses appropriate to your network configuration:
|
||||
|
|
@ -450,12 +450,12 @@ There is an [issue where support is being discussed](https://gitlab.com/gitlab-o
|
|||
|
||||
#### Step 3. Initiate the replication process
|
||||
|
||||
Below we provide a script that connects the database on the **secondary** site to
|
||||
the database on the **primary** site, replicates the database, and creates the
|
||||
Below is a script that connects the database on the **secondary** site to
|
||||
the database on the **primary** site. This script replicates the database and creates the
|
||||
needed files for streaming replication.
|
||||
|
||||
The directories used are the defaults that are set up in Omnibus. If you have
|
||||
changed any defaults, configure it as you see fit replacing the directories and paths.
|
||||
changed any defaults, configure the script accordingly, replacing any directories and paths.
|
||||
|
||||
WARNING:
|
||||
Make sure to run this on the **secondary** site as it removes all PostgreSQL's
|
||||
|
|
@ -469,7 +469,7 @@ data before running `pg_basebackup`.
|
|||
|
||||
1. Choose a database-friendly name to use for your **secondary** site to
|
||||
use as the replication slot name. For example, if your domain is
|
||||
`secondary.geo.example.com`, you may use `secondary_example` as the slot
|
||||
`secondary.geo.example.com`, use `secondary_example` as the slot
|
||||
name as shown in the commands below.
|
||||
|
||||
1. Execute the command below to start a backup/restore and begin the replication
|
||||
|
|
@ -492,33 +492,33 @@ data before running `pg_basebackup`.
|
|||
```
|
||||
|
||||
NOTE:
|
||||
If you have generated custom PostgreSQL certificates, you will want to use
|
||||
If you have generated custom PostgreSQL certificates, you need to use
|
||||
`--sslmode=verify-full` (or omit the `sslmode` line entirely), to benefit from the extra
|
||||
validation of the full host name in the certificate CN / SAN for additional security.
|
||||
Otherwise, using the automatically created certificate with `verify-full` will fail,
|
||||
as it has a generic `PostgreSQL` CN which will not match the `--host` value in this command.
|
||||
Otherwise, using the automatically created certificate with `verify-full` fails,
|
||||
as it has a generic `PostgreSQL` CN which doesn't match the `--host` value in this command.
|
||||
|
||||
This command also takes a number of additional options. You can use `--help`
|
||||
to list them all, but here are a couple of tips:
|
||||
to list them all, but here are some tips:
|
||||
|
||||
- If PostgreSQL is listening on a non-standard port, add `--port=` as well.
|
||||
- If PostgreSQL is listening on a non-standard port, add `--port=`.
|
||||
- If your database is too large to be transferred in 30 minutes, you need
|
||||
to increase the timeout, for example, `--backup-timeout=3600` if you expect the
|
||||
to increase the timeout. For example, use `--backup-timeout=3600` if you expect the
|
||||
initial replication to take under an hour.
|
||||
- Pass `--sslmode=disable` to skip PostgreSQL TLS authentication altogether
|
||||
(for example, you know the network path is secure, or you are using a site-to-site
|
||||
VPN). It is **not** safe over the public Internet!
|
||||
- You can read more details about each `sslmode` in the
|
||||
[PostgreSQL documentation](https://www.postgresql.org/docs/12/libpq-ssl.html#LIBPQ-SSL-PROTECTION);
|
||||
the instructions above are carefully written to ensure protection against
|
||||
[PostgreSQL documentation](https://www.postgresql.org/docs/12/libpq-ssl.html#LIBPQ-SSL-PROTECTION).
|
||||
The instructions above are carefully written to ensure protection against
|
||||
both passive eavesdroppers and active "man-in-the-middle" attackers.
|
||||
- Change the `--slot-name` to the name of the replication slot
|
||||
to be used on the **primary** database. The script attempts to create the
|
||||
replication slot automatically if it does not exist.
|
||||
- If you're repurposing an old site into a Geo **secondary** site, you must
|
||||
add `--force` to the command line.
|
||||
- When not in a production machine you can disable backup step if you
|
||||
really sure this is what you want by adding `--skip-backup`
|
||||
- When not in a production machine, you can disable the backup step (if you
|
||||
are certain this is what you want) by adding `--skip-backup`.
|
||||
- If you are using PgBouncer, you need to target the database host directly.
|
||||
- If you are using Patroni on your primary site, you must target the current leader host.
|
||||
- If you are using a load balancer proxy (for example HAProxy) and it is targeting the Patroni leader for the primary, you should target the load balancer proxy instead.
|
||||
|
|
@ -531,9 +531,9 @@ The replication process is now complete.
|
|||
PostgreSQL connections, which can improve performance even when using in a
|
||||
single instance installation.
|
||||
|
||||
We recommend using PgBouncer if you use GitLab in a highly available
|
||||
You should use PgBouncer if you use GitLab in a highly available
|
||||
configuration with a cluster of nodes supporting a Geo **primary** site and
|
||||
two other clusters of nodes supporting a Geo **secondary** site. One for the
|
||||
two other clusters of nodes supporting a Geo **secondary** site. You need two PgBouncer nodes: one for the
|
||||
main database and the other for the tracking database. For more information,
|
||||
see [High Availability with Omnibus GitLab](../../postgresql/replication_and_failover.md).
|
||||
|
||||
|
|
@ -545,7 +545,7 @@ when using Omnibus-managed PostgreSQL instances:
|
|||
On the GitLab Geo **primary** site:
|
||||
|
||||
1. The default value for the replication user is `gitlab_replicator`, but if you've set a custom replication
|
||||
user in your `/etc/gitlab/gitlab.rb` under the `postgresql['sql_replication_user']` setting, make sure to
|
||||
user in your `/etc/gitlab/gitlab.rb` under the `postgresql['sql_replication_user']` setting, ensure you
|
||||
adapt the following instructions for your own user.
|
||||
|
||||
Generate an MD5 hash of the desired password:
|
||||
|
|
@ -577,7 +577,7 @@ On the GitLab Geo **primary** site:
|
|||
```
|
||||
|
||||
Until the password is updated on any **secondary** sites, the [PostgreSQL log](../../logs/index.md#postgresql-logs) on
|
||||
the secondaries will report the following error message:
|
||||
the secondaries report the following error message:
|
||||
|
||||
```console
|
||||
FATAL: could not connect to the primary server: FATAL: password authentication failed for user "gitlab_replicator"
|
||||
|
|
@ -619,16 +619,16 @@ If you still haven't [migrated from repmgr to Patroni](#migrating-from-repmgr-to
|
|||
|
||||
### Migrating from repmgr to Patroni
|
||||
|
||||
1. Before migrating, we recommend that there is no replication lag between the **primary** and **secondary** sites and that replication is paused. In GitLab 13.2 and later, you can pause and resume replication with `gitlab-ctl geo-replication-pause` and `gitlab-ctl geo-replication-resume` on a Geo secondary database node.
|
||||
1. Before migrating, you should ensure there is no replication lag between the **primary** and **secondary** sites and that replication is paused. In GitLab 13.2 and later, you can pause and resume replication with `gitlab-ctl geo-replication-pause` and `gitlab-ctl geo-replication-resume` on a Geo secondary database node.
|
||||
1. Follow the [instructions to migrate repmgr to Patroni](../../postgresql/replication_and_failover.md#switching-from-repmgr-to-patroni). When configuring Patroni on each **primary** site database node, add `patroni['replication_slots'] = { '<slot_name>' => 'physical' }`
|
||||
to `gitlab.rb` where `<slot_name>` is the name of the replication slot for your **secondary** site. This ensures that Patroni recognizes the replication slot as permanent and not drop it upon restarting.
|
||||
1. If database replication to the **secondary** site was paused before migration, resume replication after Patroni is confirmed working on the **primary** site.
|
||||
to `gitlab.rb` where `<slot_name>` is the name of the replication slot for your **secondary** site. This ensures that Patroni recognizes the replication slot as permanent and doesn't drop it upon restarting.
|
||||
1. If database replication to the **secondary** site was paused before migration, resume replication after Patroni is confirmed as working on the **primary** site.
|
||||
|
||||
### Migrating a single PostgreSQL node to Patroni
|
||||
|
||||
Before the introduction of Patroni, Geo had no Omnibus support for HA setups on the **secondary** site.
|
||||
|
||||
With Patroni it's now possible to support that. To migrate the existing PostgreSQL to Patroni:
|
||||
With Patroni, this support is now possible. To migrate the existing PostgreSQL to Patroni:
|
||||
|
||||
1. Make sure you have a Consul cluster setup on the secondary (similar to how you set it up on the **primary** site).
|
||||
1. [Configure a permanent replication slot](#step-1-configure-patroni-permanent-replication-slot-on-the-primary-site).
|
||||
|
|
@ -637,23 +637,23 @@ With Patroni it's now possible to support that. To migrate the existing PostgreS
|
|||
1. [Configure a Standby Cluster](#step-4-configure-a-standby-cluster-on-the-secondary-site)
|
||||
on that single node machine.
|
||||
|
||||
You end up with a “Standby Cluster” with a single node. That allows you to later on add additional Patroni nodes by following the same instructions above.
|
||||
You end up with a “Standby Cluster” with a single node. That allows you to add additional Patroni nodes by following the same instructions above.
|
||||
|
||||
### Patroni support
|
||||
|
||||
Patroni is the official replication management solution for Geo. It
|
||||
Patroni is the official replication management solution for Geo. Patroni
|
||||
can be used to build a highly available cluster on the **primary** and a **secondary** Geo site.
|
||||
Using Patroni on a **secondary** site is optional and you don't have to use the same amount of
|
||||
Using Patroni on a **secondary** site is optional and you don't have to use the same number of
|
||||
nodes on each Geo site.
|
||||
|
||||
For instructions about how to set up Patroni on the primary site, see the
|
||||
For instructions on how to set up Patroni on the primary site, see the
|
||||
[PostgreSQL replication and failover with Omnibus GitLab](../../postgresql/replication_and_failover.md#patroni) page.
|
||||
|
||||
#### Configuring Patroni cluster for a Geo secondary site
|
||||
|
||||
In a Geo secondary site, the main PostgreSQL database is a read-only replica of the primary site's PostgreSQL database.
|
||||
|
||||
If you are currently using `repmgr` on your Geo primary site, see [these instructions](#migrating-from-repmgr-to-patroni)
|
||||
If you are using `repmgr` on your Geo primary site, see [these instructions](#migrating-from-repmgr-to-patroni)
|
||||
for migrating from `repmgr` to Patroni.
|
||||
|
||||
A production-ready and secure setup requires at least:
|
||||
|
|
@ -664,14 +664,14 @@ A production-ready and secure setup requires at least:
|
|||
- 1 internal load-balancer _(primary site only)_
|
||||
|
||||
The internal load balancer provides a single endpoint for connecting to the Patroni cluster's leader whenever a new leader is
|
||||
elected, and it is required for enabling cascading replication from the secondary sites.
|
||||
elected. The load balancer is required for enabling cascading replication from the secondary sites.
|
||||
|
||||
Be sure to use [password credentials](../../postgresql/replication_and_failover.md#database-authorization-for-patroni)
|
||||
and other database best practices.
|
||||
|
||||
##### Step 1. Configure Patroni permanent replication slot on the primary site
|
||||
|
||||
To set up database replication with Patroni on a secondary site, we must
|
||||
To set up database replication with Patroni on a secondary site, you must
|
||||
configure a _permanent replication slot_ on the primary site's Patroni cluster,
|
||||
and ensure password authentication is used.
|
||||
|
||||
|
|
@ -737,8 +737,8 @@ Leader instance**:
|
|||
##### Step 2. Configure the internal load balancer on the primary site
|
||||
|
||||
To avoid reconfiguring the Standby Leader on the secondary site whenever a new
|
||||
Leader is elected on the primary site, we must set up a TCP internal load
|
||||
balancer which gives a single endpoint for connecting to the Patroni
|
||||
Leader is elected on the primary site, you should set up a TCP internal load
|
||||
balancer. This load balancer provides a single endpoint for connecting to the Patroni
|
||||
cluster's Leader.
|
||||
|
||||
The Omnibus GitLab packages do not include a Load Balancer. Here's how you
|
||||
|
|
@ -776,14 +776,14 @@ backend postgresql
|
|||
server patroni3.internal 10.6.0.23:5432 maxconn 100 check port 8008
|
||||
```
|
||||
|
||||
Refer to your preferred Load Balancer's documentation for further guidance.
|
||||
For further guidance, refer to the documentation for your preferred load balancer.
|
||||
|
||||
##### Step 3. Configure PgBouncer nodes on the secondary site
|
||||
|
||||
A production-ready and highly available configuration requires at least
|
||||
three Consul nodes, a minimum of one PgBouncer node, but it's recommended to have
|
||||
one per database node. An internal load balancer (TCP) is required when there is
|
||||
more than one PgBouncer service nodes. The internal load balancer provides a single
|
||||
three Consul nodes and a minimum of one PgBouncer node. However, it is recommended to have
|
||||
one PgBouncer node per database node. An internal load balancer (TCP) is required when there is
|
||||
more than one PgBouncer service node. The internal load balancer provides a single
|
||||
endpoint for connecting to the PgBouncer cluster. For more information,
|
||||
see [High Availability with Omnibus GitLab](../../postgresql/replication_and_failover.md).
|
||||
|
||||
|
|
@ -844,7 +844,7 @@ On each node running a PgBouncer instance on the **secondary** site:
|
|||
|
||||
NOTE:
|
||||
If you are converting a secondary site with a single PostgreSQL instance to a Patroni Cluster, you must start on the PostgreSQL instance. It becomes the Patroni Standby Leader instance,
|
||||
and then you can switch over to another replica if you need.
|
||||
and then you can switch over to another replica if you need to.
|
||||
|
||||
For each node running a Patroni instance on the secondary site:
|
||||
|
||||
|
|
@ -898,7 +898,7 @@ For each node running a Patroni instance on the secondary site:
|
|||
```
|
||||
|
||||
1. Reconfigure GitLab for the changes to take effect.
|
||||
This is required to bootstrap PostgreSQL users and settings.
|
||||
This step is required to bootstrap PostgreSQL users and settings.
|
||||
|
||||
- If this is a fresh installation of Patroni:
|
||||
|
||||
|
|
@ -918,13 +918,12 @@ For each node running a Patroni instance on the secondary site:
|
|||
|
||||
### Migrating a single tracking database node to Patroni
|
||||
|
||||
Before the introduction of Patroni, Geo had no Omnibus support for HA setups on
|
||||
Before the introduction of Patroni, Geo provided no Omnibus support for HA setups on
|
||||
the secondary site.
|
||||
|
||||
With Patroni, it's now possible to support that. Due to some restrictions on the
|
||||
Patroni implementation on Omnibus that do not allow us to manage two different
|
||||
clusters on the same machine, we recommend setting up a new Patroni cluster for
|
||||
the tracking database by following the same instructions above.
|
||||
With Patroni, it's now possible to support HA setups. However, some restrictions in Patroni
|
||||
prevent the management of two different clusters on the same machine. You should set up a new
|
||||
Patroni cluster for the tracking database by following the same instructions above.
|
||||
|
||||
The secondary nodes backfill the new tracking database, and no data
|
||||
synchronization is required.
|
||||
|
|
@ -938,8 +937,8 @@ Omnibus automatically configures a tracking database when `roles(['geo_secondary
|
|||
If you want to run this database in a highly available configuration, don't use the `geo_secondary_role` above.
|
||||
Instead, follow the instructions below.
|
||||
|
||||
A production-ready and secure setup for the tracking PostgreSQL DB requires at least three Consul nodes, two
|
||||
Patroni nodes and one PgBouncer node on the secondary site.
|
||||
A production-ready and secure setup for the tracking PostgreSQL DB requires at least three Consul nodes: two
|
||||
Patroni nodes, and one PgBouncer node on the secondary site.
|
||||
|
||||
Because of [omnibus-6587](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/6587), Consul can't track multiple
|
||||
services, so these must be different than the nodes used for the Standby Cluster database.
|
||||
|
|
@ -1069,7 +1068,7 @@ On each node running a Patroni instance on the secondary site for the PostgreSQL
|
|||
```
|
||||
|
||||
1. Reconfigure GitLab for the changes to take effect.
|
||||
This is required to bootstrap PostgreSQL users and settings:
|
||||
This step is required to bootstrap PostgreSQL users and settings:
|
||||
|
||||
```shell
|
||||
gitlab-ctl reconfigure
|
||||
|
|
|
|||
|
|
@ -12722,6 +12722,7 @@ Relationship between an epic and an issue.
|
|||
| <a id="epicissuenotes"></a>`notes` | [`NoteConnection!`](#noteconnection) | All notes on this noteable. (see [Connections](#connections)) |
|
||||
| <a id="epicissueparticipants"></a>`participants` | [`UserCoreConnection`](#usercoreconnection) | List of participants in the issue. (see [Connections](#connections)) |
|
||||
| <a id="epicissueprojectid"></a>`projectId` | [`Int!`](#int) | ID of the issue project. |
|
||||
| <a id="epicissuerelatedvulnerabilities"></a>`relatedVulnerabilities` | [`VulnerabilityConnection`](#vulnerabilityconnection) | Related vulnerabilities of the issue. (see [Connections](#connections)) |
|
||||
| <a id="epicissuerelationpath"></a>`relationPath` | [`String`](#string) | URI path of the epic-issue relation. |
|
||||
| <a id="epicissuerelativeposition"></a>`relativePosition` | [`Int`](#int) | Relative position of the issue (used for positioning in epic tree and issue boards). |
|
||||
| <a id="epicissueseverity"></a>`severity` | [`IssuableSeverity`](#issuableseverity) | Severity level of the incident. |
|
||||
|
|
@ -14408,6 +14409,7 @@ Describes an issuable resource link for incident issues.
|
|||
| <a id="issuenotes"></a>`notes` | [`NoteConnection!`](#noteconnection) | All notes on this noteable. (see [Connections](#connections)) |
|
||||
| <a id="issueparticipants"></a>`participants` | [`UserCoreConnection`](#usercoreconnection) | List of participants in the issue. (see [Connections](#connections)) |
|
||||
| <a id="issueprojectid"></a>`projectId` | [`Int!`](#int) | ID of the issue project. |
|
||||
| <a id="issuerelatedvulnerabilities"></a>`relatedVulnerabilities` | [`VulnerabilityConnection`](#vulnerabilityconnection) | Related vulnerabilities of the issue. (see [Connections](#connections)) |
|
||||
| <a id="issuerelativeposition"></a>`relativePosition` | [`Int`](#int) | Relative position of the issue (used for positioning in epic tree and issue boards). |
|
||||
| <a id="issueseverity"></a>`severity` | [`IssuableSeverity`](#issuableseverity) | Severity level of the incident. |
|
||||
| <a id="issuesladueat"></a>`slaDueAt` | [`Time`](#time) | Timestamp of when the issue SLA expires. |
|
||||
|
|
|
|||
|
|
@ -128,7 +128,7 @@ module API
|
|||
strong_memoize(:project) do
|
||||
case package_scope
|
||||
when :project
|
||||
find_project!(params[:id])
|
||||
user_project(action: :read_package)
|
||||
when :instance
|
||||
full_path = ::Packages::Conan::Metadatum.full_path_from(package_username: params[:package_username])
|
||||
find_project!(full_path)
|
||||
|
|
|
|||
|
|
@ -164,7 +164,11 @@ module Gitlab
|
|||
end
|
||||
|
||||
def include_client?
|
||||
set_values.include?(:user) || set_values.include?(:runner) || set_values.include?(:remote_ip)
|
||||
# Don't overwrite an existing more specific client id with an `ip/` one.
|
||||
original_client_id = self.class.current_context_attribute(:client_id).to_s
|
||||
return false if original_client_id.starts_with?('user/') || original_client_id.starts_with?('runner/')
|
||||
|
||||
include_user? || set_values.include?(:runner) || set_values.include?(:remote_ip)
|
||||
end
|
||||
|
||||
def include_user?
|
||||
|
|
@ -178,8 +182,8 @@ module Gitlab
|
|||
def client
|
||||
if runner
|
||||
"runner/#{runner.id}"
|
||||
elsif user
|
||||
"user/#{user.id}"
|
||||
elsif user_id
|
||||
"user/#{user_id}"
|
||||
else
|
||||
"ip/#{remote_ip}"
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ module Gitlab
|
|||
def perform
|
||||
each_sub_batch do |sub_batch|
|
||||
deleted_members_data = sub_batch.map do |m|
|
||||
{ id: m.id, source_id: m.source_id, source_type: m.source_type }
|
||||
{ id: m.id, source_id: m.source_id, source_type: m.source_type, access_level: m.access_level }
|
||||
end
|
||||
|
||||
deleted_count = sub_batch.delete_all
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ module Gitlab
|
|||
|
||||
entry :variables, Entry::Variables,
|
||||
description: 'Environment variables that will be used.',
|
||||
metadata: { allowed_value_data: %i[value description expand], allow_array_value: true },
|
||||
metadata: { allowed_value_data: %i[value description expand options] },
|
||||
reserved: true
|
||||
|
||||
entry :stages, Entry::Stages,
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ module Gitlab
|
|||
class Variable < ::Gitlab::Config::Entry::Simplifiable
|
||||
strategy :SimpleVariable, if: -> (config) { SimpleVariable.applies_to?(config) }
|
||||
strategy :ComplexVariable, if: -> (config) { ComplexVariable.applies_to?(config) }
|
||||
strategy :ComplexArrayVariable, if: -> (config) { ComplexArrayVariable.applies_to?(config) }
|
||||
|
||||
class SimpleVariable < ::Gitlab::Config::Entry::Node
|
||||
include ::Gitlab::Config::Entry::Validatable
|
||||
|
|
@ -41,20 +40,24 @@ module Gitlab
|
|||
|
||||
class ComplexVariable < ::Gitlab::Config::Entry::Node
|
||||
include ::Gitlab::Config::Entry::Validatable
|
||||
include ::Gitlab::Config::Entry::Attributable
|
||||
|
||||
class << self
|
||||
def applies_to?(config)
|
||||
config.is_a?(Hash) && !config[:value].is_a?(Array)
|
||||
config.is_a?(Hash)
|
||||
end
|
||||
end
|
||||
|
||||
attributes :value, :description, :expand, :options, prefix: :config
|
||||
|
||||
validations do
|
||||
validates :key, alphanumeric: true
|
||||
validates :config_value, alphanumeric: true, allow_nil: false, if: :config_value_defined?
|
||||
validates :config_description, alphanumeric: true, allow_nil: false, if: :config_description_defined?
|
||||
validates :config_expand, boolean: true,
|
||||
allow_nil: false,
|
||||
if: -> { ci_raw_variables_in_yaml_config_enabled? && config_expand_defined? }
|
||||
validates :config_value, alphanumeric: true, allow_nil: true
|
||||
validates :config_description, alphanumeric: true, allow_nil: true
|
||||
validates :config_expand, boolean: true, allow_nil: true, if: -> {
|
||||
ci_raw_variables_in_yaml_config_enabled?
|
||||
}
|
||||
validates :config_options, array_of_strings: true, allow_nil: true
|
||||
|
||||
validate do
|
||||
allowed_value_data = Array(opt(:allowed_value_data))
|
||||
|
|
@ -66,91 +69,43 @@ module Gitlab
|
|||
else
|
||||
errors.add(:config, "must be a string")
|
||||
end
|
||||
|
||||
if config_options.present? && config_options.exclude?(config_value)
|
||||
errors.add(:config, 'value must be present in options')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def value
|
||||
# Needed since the `Entry::Node` provides `value` (which is current hash)
|
||||
config_value.to_s
|
||||
end
|
||||
|
||||
def value_with_data
|
||||
if ci_raw_variables_in_yaml_config_enabled?
|
||||
{
|
||||
value: value,
|
||||
raw: (!config_expand if config_expand_defined?)
|
||||
value: config_value.to_s,
|
||||
raw: (!config_expand if has_config_expand?)
|
||||
}.compact
|
||||
else
|
||||
{
|
||||
value: value
|
||||
value: config_value.to_s
|
||||
}.compact
|
||||
end
|
||||
end
|
||||
|
||||
def value_with_prefill_data
|
||||
value_with_data.merge(
|
||||
description: config_description
|
||||
description: config_description,
|
||||
options: config_options
|
||||
).compact
|
||||
end
|
||||
|
||||
def config_value
|
||||
@config[:value]
|
||||
end
|
||||
|
||||
def config_description
|
||||
@config[:description]
|
||||
end
|
||||
|
||||
def config_expand
|
||||
@config[:expand]
|
||||
end
|
||||
|
||||
def config_value_defined?
|
||||
config.key?(:value)
|
||||
end
|
||||
|
||||
def config_description_defined?
|
||||
config.key?(:description)
|
||||
end
|
||||
|
||||
def config_expand_defined?
|
||||
config.key?(:expand)
|
||||
end
|
||||
|
||||
def ci_raw_variables_in_yaml_config_enabled?
|
||||
YamlProcessor::FeatureFlags.enabled?(:ci_raw_variables_in_yaml_config)
|
||||
end
|
||||
end
|
||||
|
||||
class ComplexArrayVariable < ComplexVariable
|
||||
include ::Gitlab::Config::Entry::Validatable
|
||||
|
||||
class << self
|
||||
def applies_to?(config)
|
||||
config.is_a?(Hash) && config[:value].is_a?(Array)
|
||||
end
|
||||
end
|
||||
|
||||
validations do
|
||||
validates :config_value, array_of_strings: true, allow_nil: false, if: :config_value_defined?
|
||||
|
||||
validate do
|
||||
next if opt(:allow_array_value)
|
||||
|
||||
errors.add(:config, 'value must be an alphanumeric string')
|
||||
end
|
||||
end
|
||||
|
||||
def value
|
||||
config_value.first
|
||||
end
|
||||
|
||||
def value_with_prefill_data
|
||||
super.merge(
|
||||
value_options: config_value
|
||||
).compact
|
||||
end
|
||||
end
|
||||
|
||||
class UnknownStrategy < ::Gitlab::Config::Entry::Node
|
||||
def errors
|
||||
["variable definition must be either a string or a hash"]
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ module Gitlab
|
|||
end
|
||||
|
||||
def composable_metadata
|
||||
{ allowed_value_data: opt(:allowed_value_data), allow_array_value: opt(:allow_array_value) }
|
||||
{ allowed_value_data: opt(:allowed_value_data) }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -7,19 +7,21 @@ module Gitlab
|
|||
extend ActiveSupport::Concern
|
||||
|
||||
class_methods do
|
||||
def attributes(*attributes)
|
||||
def attributes(*attributes, prefix: nil)
|
||||
attributes.flatten.each do |attribute|
|
||||
if method_defined?(attribute)
|
||||
raise ArgumentError, "Method '#{attribute}' already defined in '#{name}'"
|
||||
attribute_method = prefix ? "#{prefix}_#{attribute}" : attribute
|
||||
|
||||
if method_defined?(attribute_method)
|
||||
raise ArgumentError, "Method '#{attribute_method}' already defined in '#{name}'"
|
||||
end
|
||||
|
||||
define_method(attribute) do
|
||||
define_method(attribute_method) do
|
||||
return unless config.is_a?(Hash)
|
||||
|
||||
config[attribute]
|
||||
end
|
||||
|
||||
define_method("has_#{attribute}?") do
|
||||
define_method("has_#{attribute_method}?") do
|
||||
config.is_a?(Hash) && config.key?(attribute)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ module Gitlab
|
|||
|
||||
schemas_for_connection = Gitlab::Database.gitlab_schemas_for_connection(connection)
|
||||
tables_to_truncate = Gitlab::Database::GitlabSchema.tables_to_schema.reject do |_, schema_name|
|
||||
(GITLAB_SCHEMAS_TO_IGNORE.union(schemas_for_connection)).include?(schema_name)
|
||||
GITLAB_SCHEMAS_TO_IGNORE.union(schemas_for_connection).include?(schema_name)
|
||||
end.keys
|
||||
|
||||
tables_sorted = Gitlab::Database::TablesSortedByForeignKeys.new(connection, tables_to_truncate).execute
|
||||
|
|
|
|||
|
|
@ -85,11 +85,15 @@ module Gitlab
|
|||
return unless Feature.enabled?(:route_hll_to_snowplow_phase2, project.namespace)
|
||||
|
||||
Gitlab::Tracking.event(
|
||||
'merge_requests',
|
||||
MR_APPROVE_ACTION,
|
||||
name,
|
||||
:approve,
|
||||
project: project,
|
||||
namespace: project.namespace,
|
||||
user: user
|
||||
user: user,
|
||||
property: MR_APPROVE_ACTION,
|
||||
label: 'redis_hll_counters.code_review.i_code_review_user_approve_mr_monthly',
|
||||
context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll,
|
||||
event: MR_APPROVE_ACTION).to_context]
|
||||
)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -16051,6 +16051,15 @@ msgstr ""
|
|||
msgid "EscalationPolicies|mins"
|
||||
msgstr ""
|
||||
|
||||
msgid "EscalationStatus|Acknowledged"
|
||||
msgstr ""
|
||||
|
||||
msgid "EscalationStatus|Resolved"
|
||||
msgstr ""
|
||||
|
||||
msgid "EscalationStatus|Triggered"
|
||||
msgstr ""
|
||||
|
||||
msgid "Estimate"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -37052,6 +37061,9 @@ msgstr ""
|
|||
msgid "SecurityReports|All activity"
|
||||
msgstr ""
|
||||
|
||||
msgid "SecurityReports|All images"
|
||||
msgstr ""
|
||||
|
||||
msgid "SecurityReports|All severities"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -39369,6 +39381,9 @@ msgstr ""
|
|||
msgid "Status"
|
||||
msgstr ""
|
||||
|
||||
msgid "Status (optional)"
|
||||
msgstr ""
|
||||
|
||||
msgid "Status was retried."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -47956,9 +47971,6 @@ msgstr ""
|
|||
msgid "ZentaoIntegration|ZenTao issues"
|
||||
msgstr ""
|
||||
|
||||
msgid "Zoom"
|
||||
msgstr ""
|
||||
|
||||
msgid "Zoom meeting added"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@ -295,11 +295,11 @@ describe('Pipeline New Form', () => {
|
|||
expect(dropdownItems.at(2).text()).toBe(valueOptions[2]);
|
||||
});
|
||||
|
||||
it('variables with multiple predefined values sets the first option as the default', () => {
|
||||
it('variable with multiple predefined values sets value as the default', () => {
|
||||
const dropdown = findValueDropdowns().at(0);
|
||||
const { valueOptions } = mockYamlVariables[2];
|
||||
|
||||
expect(dropdown.props('text')).toBe(valueOptions[0]);
|
||||
expect(dropdown.props('text')).toBe(valueOptions[1]);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -83,7 +83,7 @@ export const mockYamlVariables = [
|
|||
{
|
||||
description: 'This is a variable with predefined values.',
|
||||
key: 'VAR_WITH_OPTIONS',
|
||||
value: 'development',
|
||||
value: 'staging',
|
||||
valueOptions: ['development', 'staging', 'production'],
|
||||
},
|
||||
];
|
||||
|
|
@ -105,7 +105,7 @@ export const mockYamlVariablesWithoutDesc = [
|
|||
{
|
||||
description: null,
|
||||
key: 'VAR_WITH_OPTIONS',
|
||||
value: 'development',
|
||||
value: 'staging',
|
||||
valueOptions: ['development', 'staging', 'production'],
|
||||
},
|
||||
];
|
||||
|
|
|
|||
|
|
@ -93,10 +93,10 @@ RSpec.describe 'lograge', type: :request do
|
|||
include MemoryInstrumentationHelper
|
||||
|
||||
before do
|
||||
skip_memory_instrumentation!
|
||||
verify_memory_instrumentation_available!
|
||||
end
|
||||
|
||||
it 'logs memory usage metrics', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/384081' do
|
||||
it 'logs memory usage metrics' do
|
||||
expect(Lograge.formatter).to receive(:call)
|
||||
.with(a_hash_including(:mem_objects))
|
||||
.and_call_original
|
||||
|
|
|
|||
|
|
@ -141,7 +141,8 @@ RSpec.describe Gitlab::ApplicationContext do
|
|||
describe 'setting the client' do
|
||||
let_it_be(:remote_ip) { '127.0.0.1' }
|
||||
let_it_be(:runner) { create(:ci_runner) }
|
||||
let_it_be(:options) { { remote_ip: remote_ip, runner: runner, user: user } }
|
||||
let_it_be(:job) { create(:ci_build, :pending, :queued, user: user, project: project) }
|
||||
let_it_be(:options) { { remote_ip: remote_ip, runner: runner, user: user, job: job } }
|
||||
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
|
|
@ -150,6 +151,7 @@ RSpec.describe Gitlab::ApplicationContext do
|
|||
[:remote_ip, :runner] | :runner
|
||||
[:remote_ip, :runner, :user] | :runner
|
||||
[:remote_ip, :user] | :user
|
||||
[:job] | :user
|
||||
end
|
||||
|
||||
with_them do
|
||||
|
|
|
|||
|
|
@ -103,7 +103,7 @@ RSpec.describe Gitlab::BackgroundMigration::DestroyInvalidMembers, :migration, s
|
|||
members = create_members
|
||||
|
||||
member_data = members.map do |m|
|
||||
{ id: m.id, source_id: m.source_id, source_type: m.source_type }
|
||||
{ id: m.id, source_id: m.source_id, source_type: m.source_type, access_level: m.access_level }
|
||||
end
|
||||
|
||||
expect(Gitlab::AppLogger).to receive(:info).with({ message: 'Removing invalid member records',
|
||||
|
|
|
|||
|
|
@ -38,7 +38,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
|
|||
variables: {
|
||||
VAR: 'root',
|
||||
VAR2: { value: 'val 2', description: 'this is var 2' },
|
||||
VAR3: { value: %w[val3 val3b], description: 'this is var 3' }
|
||||
VAR3: { value: 'val3', options: %w[val3 val4 val5], description: 'this is var 3 and some options' }
|
||||
},
|
||||
after_script: ['make clean'],
|
||||
stages: %w(build pages release),
|
||||
|
|
@ -326,6 +326,42 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when variables have `options` data' do
|
||||
before do
|
||||
root.compose!
|
||||
end
|
||||
|
||||
context 'and the value is in the `options` array' do
|
||||
let(:hash) do
|
||||
{
|
||||
variables: { 'VAR' => { value: 'val1', options: %w[val1 val2] } },
|
||||
rspec: { script: 'bin/rspec' }
|
||||
}
|
||||
end
|
||||
|
||||
it 'returns correct value' do
|
||||
expect(root.variables_entry.value_with_data).to eq(
|
||||
'VAR' => { value: 'val1' }
|
||||
)
|
||||
|
||||
expect(root.variables_value).to eq('VAR' => 'val1')
|
||||
end
|
||||
end
|
||||
|
||||
context 'and the value is not in the `options` array' do
|
||||
let(:hash) do
|
||||
{
|
||||
variables: { 'VAR' => { value: 'val', options: %w[val1 val2] } },
|
||||
rspec: { script: 'bin/rspec' }
|
||||
}
|
||||
end
|
||||
|
||||
it 'returns an error' do
|
||||
expect(root.errors).to contain_exactly('variables:var config value must be present in options')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when variables have "expand" data' do
|
||||
let(:hash) do
|
||||
{
|
||||
|
|
|
|||
|
|
@ -306,48 +306,48 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
end
end
end
end

describe 'ComplexArrayVariable' do
context 'when allow_array_value metadata is false' do
let(:config) { { value: %w[value value2], description: 'description' } }
let(:metadata) { { allow_array_value: false } }
context 'when config is a hash with options' do
context 'when there is no metadata' do
let(:config) { { value: 'value', options: %w[value value2] } }
let(:metadata) { {} }

describe '#valid?' do
it { is_expected.not_to be_valid }
describe '#valid?' do
it { is_expected.not_to be_valid }
end

describe '#errors' do
subject(:errors) { entry.errors }

it { is_expected.to include 'var1 config must be a string' }
end
end

describe '#errors' do
subject(:errors) { entry.errors }
context 'when options are allowed' do
let(:config) { { value: 'value', options: %w[value value2] } }
let(:metadata) { { allowed_value_data: %i[value options] } }

it { is_expected.to include 'var1 config value must be an alphanumeric string' }
end
end
describe '#valid?' do
it { is_expected.to be_valid }
end

context 'when allow_array_value metadata is true' do
let(:config) { { value: %w[value value2], description: 'description' } }
let(:metadata) { { allowed_value_data: %i[value description], allow_array_value: true } }
describe '#value' do
subject(:value) { entry.value }

describe '#valid?' do
it { is_expected.to be_valid }
end
it { is_expected.to eq('value') }
end

describe '#value' do
subject(:value) { entry.value }
describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }

it { is_expected.to eq('value') }
end
it { is_expected.to eq(value: 'value') }
end

describe '#value_with_data' do
subject(:value_with_data) { entry.value_with_data }
describe '#value_with_prefill_data' do
subject(:value_with_prefill_data) { entry.value_with_prefill_data }

it { is_expected.to eq(value: 'value') }
end

describe '#value_with_prefill_data' do
subject(:value_with_prefill_data) { entry.value_with_prefill_data }

it { is_expected.to eq(value: 'value', description: 'description', value_options: %w[value value2]) }
it { is_expected.to eq(value: 'value', options: %w[value value2]) }
end
end
end
end
@ -116,8 +116,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
it_behaves_like 'invalid config', /variable_1 config must be a string/
end

context 'when metadata has allow_array_value and allowed_value_data' do
let(:metadata) { { allowed_value_data: %i[value description], allow_array_value: true } }
context 'when metadata has the allowed_value_data key' do
let(:metadata) { { allowed_value_data: %i[value description options] } }

let(:result) do
{ 'VARIABLE_1' => 'value' }

@ -143,17 +143,15 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
end
end

context 'when entry config value has key-value pair and value is an array' do
context 'when entry config value has options' do
let(:config) do
{ 'VARIABLE_1' => { value: %w[value1 value2], description: 'variable 1' } }
{ 'VARIABLE_1' => {
value: 'value1', options: %w[value1 value2], description: 'variable 1'
} }
end

context 'when there is no allowed_value_data metadata' do
it_behaves_like 'invalid config', /variable_1 config value must be an alphanumeric string/
end

context 'when metadata has allow_array_value and allowed_value_data' do
let(:metadata) { { allowed_value_data: %i[value description], allow_array_value: true } }
context 'when metadata has allowed_value_data' do
let(:metadata) { { allowed_value_data: %i[value description options] } }

let(:result) do
{ 'VARIABLE_1' => 'value1' }

@ -172,7 +170,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
describe '#value_with_prefill_data' do
it 'returns variable with prefill data' do
expect(entry.value_with_prefill_data).to eq(
'VARIABLE_1' => { value: 'value1', value_options: %w[value1 value2], description: 'variable 1' }
'VARIABLE_1' => { value: 'value1', options: %w[value1 value2], description: 'variable 1' }
)
end
end

@ -234,14 +232,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variables do
it_behaves_like 'invalid config', /variable_1 config uses invalid data keys: hello/
end

context 'when entry config value has hash with nil description' do
let(:config) do
{ 'VARIABLE_1' => { value: 'value 1', description: nil } }
end

it_behaves_like 'invalid config', /variable_1 config description must be an alphanumeric string/
end

context 'when entry config value has hash without description' do
let(:config) do
{ 'VARIABLE_1' => { value: 'value 1' } }
@ -10,10 +10,11 @@ RSpec.describe Gitlab::Config::Entry::Attributable do
end

let(:instance) { node.new }
let(:prefix) { nil }

before do
node.class_eval do
attributes :name, :test
node.class_exec(prefix) do |pre|
attributes :name, :test, prefix: pre
end
end

@ -24,6 +25,17 @@ RSpec.describe Gitlab::Config::Entry::Attributable do
.and_return({ name: 'some name', test: 'some test' })
end

context 'and is provided a prefix' do
let(:prefix) { :pre }

it 'returns the value of config' do
expect(instance).to have_pre_name
expect(instance.pre_name).to eq 'some name'
expect(instance).to have_pre_test
expect(instance.pre_test).to eq 'some test'
end
end

it 'returns the value of config' do
expect(instance).to have_name
expect(instance.name).to eq 'some name'
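The prefix behaviour exercised above amounts to prepending an optional prefix to each generated reader and predicate. A rough sketch of the idea, under assumed shape only (this is not the real Gitlab::Config::Entry::Attributable module):

# Rough sketch of prefixed attribute readers over a @config hash.
module SimpleAttributable
  def attributes(*names, prefix: nil)
    names.each do |name|
      method_name = prefix ? "#{prefix}_#{name}" : name.to_s

      define_method(method_name) { @config[name] }
      define_method("has_#{method_name}?") { @config.key?(name) }
    end
  end
end

class Node
  extend SimpleAttributable
  attributes :name, :test, prefix: :pre

  def initialize(config)
    @config = config
  end
end

node = Node.new(name: 'some name', test: 'some test')
node.pre_name      # => "some name"
node.has_pre_test? # => true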
@ -126,10 +126,10 @@ RSpec.describe Gitlab::InstrumentationHelper do
include MemoryInstrumentationHelper

before do
skip_memory_instrumentation!
verify_memory_instrumentation_available!
end

it 'logs memory usage metrics', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/384081' do
it 'logs memory usage metrics' do
subject

expect(payload).to include(
@ -2,15 +2,15 @@

require 'spec_helper'

RSpec.describe Gitlab::Memory::Instrumentation do
RSpec.describe Gitlab::Memory::Instrumentation, feature_category: :application_performance do
include MemoryInstrumentationHelper

before do
skip_memory_instrumentation!
verify_memory_instrumentation_available!
end

describe '.available?' do
it 'returns true', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/384081' do
it 'returns true' do
expect(described_class).to be_available
end
end

@ -18,7 +18,7 @@ RSpec.describe Gitlab::Memory::Instrumentation do
describe '.start_thread_memory_allocations' do
subject { described_class.start_thread_memory_allocations }

it 'a hash is returned', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/384081' do
it 'a hash is returned' do
is_expected.to be_a(Hash)
end

@ -47,7 +47,7 @@ RSpec.describe Gitlab::Memory::Instrumentation do
expect(described_class).to receive(:measure_thread_memory_allocations).and_call_original
end

it 'a hash is returned', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/384081' do
it 'a hash is returned' do
result = subject
expect(result).to include(
mem_objects: be > 1000,
@ -94,30 +94,15 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:action) { described_class::MR_APPROVE_ACTION }
end

it 'records correct payload with Snowplow event', :snowplow do
stub_feature_flags(route_hll_to_snowplow_phase2: true)

subject

expect_snowplow_event(
category: 'merge_requests',
action: 'i_code_review_user_approve_mr',
namespace: target_project.namespace,
user: user,
project: target_project
)
end

context 'when FF is disabled' do
before do
stub_feature_flags(route_hll_to_snowplow_phase2: false)
end

it 'doesnt emit snowplow events', :snowplow do
subject

expect_no_snowplow_event
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:action) { :approve }
let(:category) { described_class.name }
let(:project) { target_project }
let(:namespace) { project.namespace.reload }
let(:user) { project.creator }
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:label) { 'redis_hll_counters.code_review.i_code_review_user_approve_mr_monthly' }
let(:property) { described_class::MR_APPROVE_ACTION }
end
end
@ -0,0 +1,72 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe FinalizeGroupMemberNamespaceIdMigration, :migration do
let(:batched_migrations) { table(:batched_background_migrations) }

let_it_be(:migration) { described_class::MIGRATION }

describe '#up' do
shared_examples 'finalizes the migration' do
it 'finalizes the migration' do
allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
expect(runner).to receive(:finalize).with('BackfillMemberNamespaceForGroupMembers', :members, :id, [])
end
end
end

context 'when migration is missing' do
it 'warns migration not found' do
expect(Gitlab::AppLogger)
.to receive(:warn).with(/Could not find batched background migration for the given configuration:/)

migrate!
end
end

context 'with migration present' do
let!(:group_member_namespace_id_backfill) do
batched_migrations.create!(
job_class_name: 'BackfillMemberNamespaceForGroupMembers',
table_name: :members,
column_name: :id,
job_arguments: [],
interval: 2.minutes,
min_value: 1,
max_value: 2,
batch_size: 1000,
sub_batch_size: 200,
gitlab_schema: :gitlab_main,
status: 3 # finished
)
end

context 'when migration finished successfully' do
it 'does not raise exception' do
expect { migrate! }.not_to raise_error
end
end

context 'with different migration statuses' do
using RSpec::Parameterized::TableSyntax

where(:status, :description) do
0 | 'paused'
1 | 'active'
4 | 'failed'
5 | 'finalizing'
end

with_them do
before do
group_member_namespace_id_backfill.update!(status: status)
end

it_behaves_like 'finalizes the migration'
end
end
end
end
end
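The new spec above expects the migration's #up to delegate to BatchedMigrationRunner#finalize for 'BackfillMemberNamespaceForGroupMembers' on members.id. A hedged sketch of what such a post-deployment migration could look like, assuming GitLab's ensure_batched_background_migration_is_finished helper; the real file is not shown in this diff and may differ:

# Sketch only: the actual FinalizeGroupMemberNamespaceIdMigration may differ.
class FinalizeGroupMemberNamespaceIdMigration < Gitlab::Database::Migration[2.0]
  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_main

  MIGRATION = 'BackfillMemberNamespaceForGroupMembers'

  def up
    # Blocks until the batched background migration has been finalized.
    ensure_batched_background_migration_is_finished(
      job_class_name: MIGRATION,
      table_name: :members,
      column_name: :id,
      job_arguments: [],
      finalize: true
    )
  end

  def down
    # no-op
  end
end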
@ -6160,33 +6160,44 @@ RSpec.describe User do

describe '#notification_email_for' do
let(:user) { create(:user) }
let(:group) { create(:group) }

subject { user.notification_email_for(group) }
subject { user.notification_email_for(namespace) }

context 'when group is nil' do
let(:group) { nil }
context 'when namespace is nil' do
let(:namespace) { nil }

it 'returns global notification email' do
is_expected.to eq(user.notification_email_or_default)
end
end

context 'when group has no notification email set' do
it 'returns global notification email' do
create(:notification_setting, user: user, source: group, notification_email: '')
context 'for group namespace' do
let(:namespace) { create(:group) }

is_expected.to eq(user.notification_email_or_default)
context 'when group has no notification email set' do
it 'returns global notification email' do
create(:notification_setting, user: user, source: namespace, notification_email: '')

is_expected.to eq(user.notification_email_or_default)
end
end

context 'when group has notification email set' do
it 'returns group notification email' do
group_notification_email = 'user+group@example.com'
create(:email, :confirmed, user: user, email: group_notification_email)
create(:notification_setting, user: user, source: namespace, notification_email: group_notification_email)

is_expected.to eq(group_notification_email)
end
end
end

context 'when group has notification email set' do
it 'returns group notification email' do
group_notification_email = 'user+group@example.com'
create(:email, :confirmed, user: user, email: group_notification_email)
create(:notification_setting, user: user, source: group, notification_email: group_notification_email)
context 'for user namespace' do
let(:namespace) { create(:user_namespace) }

is_expected.to eq(group_notification_email)
it 'returns global notification email' do
is_expected.to eq(user.notification_email_or_default)
end
end
end
@ -6,6 +6,17 @@ RSpec.describe API::ConanProjectPackages do

let(:project_id) { project.id }

shared_examples 'accept get request on private project with access to package registry for everyone' do
subject { get api(url) }

before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
project.project_feature.update!(package_registry_access_level: ProjectFeature::PUBLIC)
end

it_behaves_like 'returning response status', :ok
end

describe 'GET /api/v4/projects/:id/packages/conan/v1/ping' do
let(:url) { "/projects/#{project.id}/packages/conan/v1/ping" }

@ -40,43 +51,50 @@ RSpec.describe API::ConanProjectPackages do
include_context 'conan recipe endpoints'

let(:url_prefix) { "#{Settings.gitlab.base_url}/api/v4/projects/#{project_id}" }
let(:recipe_path) { package.conan_recipe_path }

subject { get api(url), headers: headers }

describe 'GET /api/v4/projects/:id/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel' do
let(:recipe_path) { package.conan_recipe_path }
let(:url) { "/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}" }

it_behaves_like 'recipe snapshot endpoint'
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end

describe 'GET /api/v4/projects/:id/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel/packages/:conan_package_reference' do
let(:recipe_path) { package.conan_recipe_path }
let(:url) { "/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/packages/#{conan_package_reference}" }

it_behaves_like 'package snapshot endpoint'
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end

describe 'GET /api/v4/projects/:id/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel/digest' do
subject { get api("/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/digest"), headers: headers }
let(:url) { "/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/digest" }

it_behaves_like 'recipe download_urls endpoint'
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end

describe 'GET /api/v4/projects/:id/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel/packages/:conan_package_reference/download_urls' do
subject { get api("/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/packages/#{conan_package_reference}/download_urls"), headers: headers }
let(:url) { "/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/packages/#{conan_package_reference}/download_urls" }

it_behaves_like 'package download_urls endpoint'
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end

describe 'GET /api/v4/projects/:id/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel/download_urls' do
subject { get api("/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/download_urls"), headers: headers }
let(:url) { "/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/download_urls" }

it_behaves_like 'recipe download_urls endpoint'
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end

describe 'GET /api/v4/projects/:id/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel/packages/:conan_package_reference/digest' do
subject { get api("/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/packages/#{conan_package_reference}/digest"), headers: headers }
let(:url) { "/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}/packages/#{conan_package_reference}/digest" }

it_behaves_like 'package download_urls endpoint'
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end

describe 'POST /api/v4/projects/:id/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel/upload_urls' do

@ -101,24 +119,22 @@ RSpec.describe API::ConanProjectPackages do
context 'file download endpoints', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/326194' do
include_context 'conan file download endpoints'

subject { get api(url), headers: headers }

describe 'GET /api/v4/projects/:id/packages/conan/v1/files/:package_name/package_version/:package_username/:package_channel/:recipe_revision/export/:file_name' do
subject do
get api("/projects/#{project_id}/packages/conan/v1/files/#{recipe_path}/#{metadata.recipe_revision}/export/#{recipe_file.file_name}"),
headers: headers
end
let(:url) { "/projects/#{project_id}/packages/conan/v1/files/#{recipe_path}/#{metadata.recipe_revision}/export/#{recipe_file.file_name}" }

it_behaves_like 'recipe file download endpoint'
it_behaves_like 'project not found by project id'
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end

describe 'GET /api/v4/projects/:id/packages/conan/v1/files/:package_name/package_version/:package_username/:package_channel/:recipe_revision/package/:conan_package_reference/:package_revision/:file_name' do
subject do
get api("/projects/#{project_id}/packages/conan/v1/files/#{recipe_path}/#{metadata.recipe_revision}/package/#{metadata.conan_package_reference}/#{metadata.package_revision}/#{package_file.file_name}"),
headers: headers
end
let(:url) { "/projects/#{project_id}/packages/conan/v1/files/#{recipe_path}/#{metadata.recipe_revision}/package/#{metadata.conan_package_reference}/#{metadata.package_revision}/#{package_file.file_name}" }

it_behaves_like 'package file download endpoint'
it_behaves_like 'project not found by project id'
it_behaves_like 'accept get request on private project with access to package registry for everyone'
end
end
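The refactor above moves every Conan endpoint spec to a single shared subject plus a per-describe let(:url), so the new 'accept get request on private project with access to package registry for everyone' shared example can be reused everywhere. A generic, self-contained illustration of that let-driven shared-example pattern (not GitLab's actual spec support code):

# Each context only overrides `url`; the shared example uses it.
RSpec.shared_examples 'resolves the endpoint' do
  it 'builds the expected project-scoped path' do
    expect(url).to start_with('/projects/')
  end
end

RSpec.describe 'Conan endpoint URLs' do
  let(:project_id) { 42 }

  context 'ping' do
    let(:url) { "/projects/#{project_id}/packages/conan/v1/ping" }

    it_behaves_like 'resolves the endpoint'
  end

  context 'recipe snapshot' do
    let(:url) { "/projects/#{project_id}/packages/conan/v1/conans/foo" }

    it_behaves_like 'resolves the endpoint'
  end
end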
@ -1,162 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Clusters::Applications::PrometheusConfigService do
include Gitlab::Routing.url_helpers

let_it_be(:project) { create(:project) }
let_it_be(:production) { create(:environment, project: project) }
let_it_be(:cluster) { create(:cluster, :provided_by_user, projects: [project]) }

let(:application) do
create(:clusters_applications_prometheus, :installed, cluster: cluster)
end

subject { described_class.new(project, cluster, application).execute(input) }

describe '#execute' do
let(:input) do
YAML.load_file(Rails.root.join('vendor/prometheus/values.yaml'))
end

context 'with alerts' do
let!(:alert) do
create(:prometheus_alert, project: project, environment: production)
end

it 'enables alertmanager' do
expect(subject.dig('alertmanager', 'enabled')).to eq(true)
end

describe 'alertmanagerFiles' do
let(:alertmanager) do
subject.dig('alertmanagerFiles', 'alertmanager.yml')
end

it 'contains receivers and route' do
expect(alertmanager.keys).to contain_exactly('receivers', 'route')
end

describe 'receivers' do
let(:receiver) { alertmanager.dig('receivers', 0) }
let(:webhook_config) { receiver.dig('webhook_configs', 0) }

let(:notify_url) do
notify_project_prometheus_alerts_url(project, format: :json)
end

it 'sets receiver' do
expect(receiver['name']).to eq('gitlab')
end

it 'sets webhook_config' do
expect(webhook_config).to eq(
'url' => notify_url,
'send_resolved' => true,
'http_config' => {
'bearer_token' => application.alert_manager_token
}
)
end
end

describe 'route' do
let(:route) { alertmanager.fetch('route') }

it 'sets route' do
expect(route).to eq(
'receiver' => 'gitlab',
'group_wait' => '30s',
'group_interval' => '5m',
'repeat_interval' => '4h'
)
end
end
end

describe 'serverFiles' do
let(:groups) { subject.dig('serverFiles', 'alerts', 'groups') }

it 'sets the alerts' do
rules = groups.dig(0, 'rules')
expect(rules.size).to eq(1)

expect(rules.first['alert']).to eq(alert.title)
end

context 'with parameterized queries' do
let!(:alert) do
create(:prometheus_alert,
project: project,
environment: production,
prometheus_metric: metric,
operator: PrometheusAlert.operators['gt'],
threshold: 0)
end

let(:metric) do
create(:prometheus_metric, query: query, project: project)
end

let(:query) { 'up{environment="{{ci_environment_slug}}"}' }

it 'substitutes query variables' do
expect(Gitlab::Prometheus::QueryVariables)
.to receive(:call)
.with(production, start_time: nil, end_time: nil)
.and_call_original

expr = groups.dig(0, 'rules', 0, 'expr')
expect(expr).to eq("up{environment=\"#{production.slug}\"} > 0.0")
end
end

context 'with multiple environments' do
let(:staging) { create(:environment, project: project) }

before do
create(:prometheus_alert, project: project, environment: production)
create(:prometheus_alert, project: project, environment: staging)
end

it 'sets alerts for multiple environment' do
env_names = groups.map { |group| group['name'] }
expect(env_names).to contain_exactly(
"#{production.name}.rules",
"#{staging.name}.rules"
)
end

it 'substitutes query variables once per environment' do
allow(Gitlab::Prometheus::QueryVariables).to receive(:call).and_call_original

expect(Gitlab::Prometheus::QueryVariables)
.to receive(:call)
.with(production, start_time: nil, end_time: nil)

expect(Gitlab::Prometheus::QueryVariables)
.to receive(:call)
.with(staging, start_time: nil, end_time: nil)

subject
end
end
end
end

context 'without alerts' do
it 'disables alertmanager' do
expect(subject.dig('alertmanager', 'enabled')).to eq(false)
end

it 'removes alertmanagerFiles' do
expect(subject).not_to include('alertmanagerFiles')
end

it 'removes alerts' do
expect(subject.dig('serverFiles', 'alerts')).to eq({})
end
end
end
end
@ -12,7 +12,8 @@ variables:
description: 'value of KEY_VALUE_VAR'
DB_NAME: postgres
ENVIRONMENT_VAR:
value: ['env var value', 'env var value2']
value: 'env var value'
options: ['env var value', 'env var value2']
description: 'env var description'

stages:
@ -5,13 +5,10 @@
# This concept is currently tried to be upstreamed here:
# - https://github.com/ruby/ruby/pull/3978
module MemoryInstrumentationHelper
def skip_memory_instrumentation!
def verify_memory_instrumentation_available!
return if ::Gitlab::Memory::Instrumentation.available?

# if we are running in CI, a test cannot be skipped
return if ENV['CI']

skip 'Missing a memory instrumentation patch. ' \
raise 'Ruby is missing a required patch that enables memory instrumentation. ' \
'More information can be found here: https://gitlab.com/gitlab-org/gitlab/-/issues/296530.'
end
end
@ -275,30 +275,17 @@ RSpec.describe PostReceive do
expect { perform }.to change { counter.read(:pushes) }.by(1)
end

it 'records correct payload with Snowplow event', :snowplow do
stub_feature_flags(route_hll_to_snowplow_phase2: true)
it_behaves_like 'Snowplow event tracking' do
let(:action) { :push }
let(:category) { described_class.name }
let(:namespace) { project.namespace }
let(:user) { project.creator }
let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
let(:label) { 'counts.source_code_pushes' }
let(:property) { 'source_code_pushes' }
let(:context) { [Gitlab::Tracking::ServicePingContext.new(data_source: :redis, key_path: label).to_h] }

perform

expect_snowplow_event(
category: 'PostReceive',
action: 'source_code_pushes',
namespace: project.namespace,
user: project.first_owner,
project: project
)
end

context 'when FF is disabled' do
before do
stub_feature_flags(route_hll_to_snowplow_phase2: false)
end

it 'doesnt emit snowplow events', :snowplow do
perform

expect_no_snowplow_event
end
subject(:post_receive) { perform }
end
end
end
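As with the merge request approval counter earlier, the hand-written Snowplow assertions here are replaced by the 'Snowplow event tracking' shared examples, configured entirely through let overrides and a subject supplied by the including group. A generic, runnable illustration of that pattern, not the actual shared example:

# A shared example that relies on `subject` and `let` values from the
# including group; stands in for GitLab's Snowplow tracking examples.
RSpec.shared_examples 'records a metric' do
  it 'increments the configured counter' do
    expect { subject }.to change { counters[label] }.by(1)
  end
end

RSpec.describe 'push counting' do
  let(:counters) { Hash.new(0) }
  let(:label)    { 'counts.source_code_pushes' }

  subject(:post_receive) { counters[label] += 1 }

  it_behaves_like 'records a metric'
end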
@ -26,7 +26,7 @@ require (
github.com/sirupsen/logrus v1.9.0
github.com/smartystreets/goconvey v1.7.2
github.com/stretchr/testify v1.8.1
gitlab.com/gitlab-org/gitaly/v15 v15.5.4
gitlab.com/gitlab-org/gitaly/v15 v15.6.1
gitlab.com/gitlab-org/golang-archive-zip v0.1.1
gitlab.com/gitlab-org/labkit v1.16.1
gocloud.dev v0.27.0

@ -69,7 +69,7 @@ require (
github.com/dlclark/regexp2 v1.4.0 // indirect
github.com/go-ole/go-ole v1.2.4 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt v3.2.1+incompatible // indirect
github.com/golang-jwt/jwt v3.2.2+incompatible // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/google/go-cmp v0.5.9 // indirect
github.com/google/pprof v0.0.0-20220608213341-c488b8fa1db3 // indirect

@ -107,13 +107,13 @@ require (
github.com/uber/jaeger-lib v2.4.1+incompatible // indirect
go.opencensus.io v0.23.0 // indirect
go.uber.org/atomic v1.9.0 // indirect
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa // indirect
golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be // indirect
golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e // indirect
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 // indirect
golang.org/x/sync v0.1.0 // indirect
golang.org/x/sys v0.1.0 // indirect
golang.org/x/text v0.4.0 // indirect
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 // indirect
golang.org/x/time v0.2.0 // indirect
golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
google.golang.org/api v0.91.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
@ -683,8 +683,9 @@ github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXP
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c=
github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=

@ -1485,8 +1486,8 @@ github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
gitlab.com/gitlab-org/gitaly/v15 v15.5.4 h1:vPhfE50uWAWjB/WkKPG8Xr6BxNpql5RxVgIHt24IRtc=
gitlab.com/gitlab-org/gitaly/v15 v15.5.4/go.mod h1:G5q5H6OYMSEDnKXsQoYTzI+ysCTfM4Of2z0v6xeHtRY=
gitlab.com/gitlab-org/gitaly/v15 v15.6.1 h1:Rb1vmtNAitbZ85Cog49vUfcDrU5jWB8BG09lXZmC2sM=
gitlab.com/gitlab-org/gitaly/v15 v15.6.1/go.mod h1:RKa+3ADKfTonDb1pe8AtppdNHNeOM+ChtMmB7T0QWhY=
gitlab.com/gitlab-org/golang-archive-zip v0.1.1 h1:35k9giivbxwF03+8A05Cm8YoxoakU8FBCj5gysjCTCE=
gitlab.com/gitlab-org/golang-archive-zip v0.1.1/go.mod h1:ZDtqpWPGPB9qBuZnZDrKQjIdJtkN7ZAoVwhT6H2o2kE=
gitlab.com/gitlab-org/labkit v1.16.1 h1:J+HmNVR5bvPfrv9/fgKICFis2nmEugRXHMeRPvsVZUg=

@ -1619,8 +1620,9 @@ golang.org/x/crypto v0.0.0-20211202192323-5770296d904e/go.mod h1:IxCIyHEi3zRg3s0
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa h1:zuSxTR4o9y82ebqCUJYNGJbGPo6sKVl54f/TVDObg1c=
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be h1:fmw3UbQh+nxngCAHrDCCztao/kbYFnWjoqop8dHx05A=
golang.org/x/crypto v0.0.0-20220926161630-eccd6366d1be/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=

@ -1788,8 +1790,9 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=

@ -1963,8 +1966,9 @@ golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxb
golang.org/x/time v0.0.0-20220210224613-90d013bbcef8/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220224211638-0e9765cccd65/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220609170525-579cf78fd858/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9 h1:ftMN5LMiBFjbzleLqtoBZk7KdJwhuybIU+FckUHgoyQ=
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.2.0 h1:52I/1L54xyEQAYdtcSuxtiT84KGYTBGXwayxmIpNJhE=
golang.org/x/time v0.2.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=