Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 314ed8700b
commit 0c2b535267
@@ -26,8 +26,8 @@ export default {
<template>
<div v-gl-tooltip="tooltip" class="gl-display-inline-block gl-text-secondary gl-mb-3 gl-mr-4">
<gl-icon v-if="icon" :name="icon" :size="12" />
<!-- display tooltip as a label for screen readers -->
<span class="gl-sr-only">{{ tooltip }}</span>
<!-- display tooltip as a label for screen readers and make it unavailable for copying -->
<span class="gl-sr-only gl-user-select-none">{{ tooltip }}</span>
<slot></slot>
</div>
</template>
@@ -49,7 +49,7 @@ module Users
end

def self.distinct_users_sql
name = users_table.table_name
name = users_table.name
Arel.sql("DISTINCT ON(#{name}.id) #{name}.*")
end
@@ -12,6 +12,8 @@ class EventPresenter < Gitlab::View::Presenter::Delegated
# Caching `visible_to_user?` method in the presenter because it might be called multiple times.
delegator_override :visible_to_user?
def visible_to_user?(user = nil)
return super(user) unless user

@visible_to_user_cache.fetch(user&.id) { super(user) }
end
@@ -210,5 +210,6 @@
locals: { membership_source: @project,
group: @group,
current_user_is_group_owner: current_user_is_group_owner }
- c.with_footer do
= paginate @project_members, param_name: 'project_members_page', theme: 'gitlab'
- unless @project_members.size < Kaminari.config.default_per_page
- c.with_footer do
= paginate @project_members, param_name: 'project_members_page', theme: 'gitlab'
@@ -1,8 +0,0 @@
---
name: invert_omniauth_args_merging
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135770
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/430348
milestone: '16.6'
type: development
group: group::authentication
default_enabled: false
@@ -9,6 +9,7 @@ value_type: number
status: active
time_frame: 28d
data_source: database
instrumentation_class: UniqueUsersAllImportsMetric
distribution:
- ce
- ee
@@ -9,6 +9,7 @@ value_type: number
status: active
time_frame: all
data_source: database
instrumentation_class: UniqueUsersAllImportsMetric
distribution:
- ce
- ee
@@ -0,0 +1,41 @@
# frozen_string_literal: true

class EnsureIdUniquenessForPCiBuildsV4 < Gitlab::Database::Migration[2.2]
include Gitlab::Database::SchemaHelpers
include Gitlab::Database::MigrationHelpers::WraparoundAutovacuum

enable_lock_retries!
milestone '16.7'

TABLE_NAME = :p_ci_builds
FUNCTION_NAME = :assign_p_ci_builds_id_value
TRIGGER_NAME = :assign_p_ci_builds_id_trigger

def up
return unless should_run?

lock_tables(TABLE_NAME, :ci_builds)

Gitlab::Database::PostgresPartitionedTable.each_partition(TABLE_NAME) do |partition|
drop_trigger(partition.identifier, TRIGGER_NAME, if_exists: true)
end

create_trigger(TABLE_NAME, TRIGGER_NAME, FUNCTION_NAME, fires: 'BEFORE INSERT')
end

def down
return unless should_run?

drop_trigger(TABLE_NAME, TRIGGER_NAME, if_exists: true)

Gitlab::Database::PostgresPartitionedTable.each_partition(TABLE_NAME) do |partition|
create_trigger(partition.identifier, TRIGGER_NAME, FUNCTION_NAME, fires: 'BEFORE INSERT')
end
end

private

def should_run?
can_execute_on?(:ci_builds)
end
end
@@ -0,0 +1 @@
5ee1339e939d6fe7012d292758c6081b86682adde3a9f2d852e6fc4aa9a8db8c
@@ -36893,7 +36893,7 @@ ALTER INDEX p_ci_builds_scheduled_at_idx ATTACH PARTITION partial_index_ci_build

ALTER INDEX p_ci_builds_token_encrypted_partition_id_idx ATTACH PARTITION unique_ci_builds_token_encrypted_and_partition_id;

CREATE TRIGGER assign_p_ci_builds_id_trigger BEFORE INSERT ON ci_builds FOR EACH ROW EXECUTE FUNCTION assign_p_ci_builds_id_value();
CREATE TRIGGER assign_p_ci_builds_id_trigger BEFORE INSERT ON p_ci_builds FOR EACH ROW EXECUTE FUNCTION assign_p_ci_builds_id_value();

CREATE TRIGGER chat_names_loose_fk_trigger AFTER DELETE ON chat_names REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
@@ -903,6 +903,11 @@ Example response:
}
```

Users on [GitLab Premium or Ultimate](https://about.gitlab.com/pricing/) also see these
preferences:

- `code_suggestions`

Parameters:

- **none**
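For illustration only, a minimal request sketch for the endpoint these docs describe (host and token are placeholders, not part of this change):

```shell
# Hypothetical example: fetch the current user's preferences; on Premium/Ultimate
# the response also includes `code_suggestions`.
curl --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/user/preferences"
```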
@@ -933,6 +938,12 @@ Parameters:
| `show_whitespace_in_diffs` | Yes | Flag indicating the user sees whitespace changes in diffs. |
| `pass_user_identities_to_ci_jwt` | Yes | Flag indicating the user passes their external identities as CI information. This attribute does not contain enough information to identify or authorize the user in an external system. The attribute is internal to GitLab, and must not be passed to third-party services. For more information and examples, see [Token Payload](../ci/secrets/id_token_authentication.md#token-payload). |

Users on [GitLab Premium or Ultimate](https://about.gitlab.com/pricing/) also can update these parameters:

| Attribute | Required | Description |
|:---------------------------------|:---------|:---------------------------------------------------|
| `code_suggestions` | No | Flag indicating the user allows code suggestions. Argument is experimental and can be removed in the future without notice. |
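A hedged usage sketch for the new parameter, using the `PUT /user/preferences` route added elsewhere in this diff (host and token are placeholders):

```shell
# Hypothetical example: opt the current user in to code suggestions.
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/user/preferences?code_suggestions=true"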

## User follow

### Follow and unfollow users
@@ -398,7 +398,7 @@ Do not use **confirmation box** or **confirmation dialog box**. See also [**dial

## container registry

When documenting the GitLab container registry features and functionality, use lower case.
When documenting the GitLab container registry features and functionality, use lowercase.

Use:
@@ -1256,9 +1256,14 @@ Do not use bold.
Do not use **Owner permissions**. A user who is assigned the Owner role has a set of associated permissions.
An Owner is the highest role a user can have.

## Package Registry
## package registry

Use title case for the GitLab Package Registry.
When documenting the GitLab package registry features and functionality, use lowercase.

Use:

- The GitLab package registry supports A, B, and C.
- You can publish a package to your project's package registry.

## page
@@ -34,6 +34,33 @@ For more information about upgrading GitLab Helm Chart, see [the release notes f

- Git 2.42.0 and later is required by Gitaly. For self-compiled installations, you should use the [Git version provided by Gitaly](../../install/installation.md#git).

### Linux package installations

- SSH clone URLs can be customized by setting `gitlab_rails['gitlab_ssh_host']`
in `/etc/gitlab/gitlab.rb`. This setting must now be a
[valid hostname](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132238).
Previously, it could be an arbitrary string that was used to show a
custom hostname and port in the repository clone URL.

For example, prior to GitLab 16.5, the following setting worked:

```ruby
gitlab_rails['gitlab_ssh_host'] = "gitlab.example.com:2222"
```

Starting with GitLab 16.5, the hostname and port must be specified separately:

```ruby
gitlab_rails['gitlab_ssh_host'] = "gitlab.example.com"
gitlab_rails['gitlab_shell_ssh_port'] = 2222
```

After you change the setting, make sure to reconfigure GitLab:

```shell
sudo gitlab-ctl reconfigure
```

### Geo installations

Specific information applies to installations using Geo:
@@ -89,10 +89,11 @@ the following sections and tables provide an alternative.

## Scan result policy schema

> The `approval_settings` fields was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/418752) in GitLab 16.4 [with flags](../../../administration/feature_flags.md) named `scan_result_policies_block_unprotecting_branches`, `scan_result_any_merge_request`, or `scan_result_policies_block_force_push`. All are disabled by default.
> The `approval_settings` fields were [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/418752) in GitLab 16.4 [with flags](../../../administration/feature_flags.md) named `scan_result_policies_block_unprotecting_branches`, `scan_result_any_merge_request`, or `scan_result_policies_block_force_push`. Flag `scan_result_policies_block_unprotecting_branches` is disabled by default. Flags `scan_result_any_merge_request` and `scan_result_policies_block_force_push` are enabled by default.

FLAG:
On self-managed GitLab, by default the `approval_settings` field is unavailable. To show the feature, an administrator can [enable the feature flags](../../../administration/feature_flags.md) named `scan_result_policies_block_unprotecting_branches`, `scan_result_any_merge_request`, or `scan_result_policies_block_force_push`. See the `approval_settings` section below for more information.
On self-managed GitLab, by default the `approval_settings` field is available. To hide the feature, an administrator can [disable the feature flag](../../../administration/feature_flags.md) named `scan_result_any_merge_request`.
On GitLab.com, this feature is available. See the `approval_settings` section below for more information.

| Field | Type | Required |Possible values | Description |
|--------|------|----------|----------------|-------------|
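As a rough sketch of what "disable the feature flag" means in practice on a self-managed instance (the command form is an assumption; the flag name comes from the text above):

```shell
# Hypothetical example: hide the approval_settings behavior by disabling the flag.
sudo gitlab-rails runner "Feature.disable(:scan_result_any_merge_request)"
```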
@@ -145,10 +146,11 @@ This rule enforces the defined actions based on license findings.
## `any_merge_request` rule type

> - The `branch_exceptions` field was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/418741) in GitLab 16.3 [with a flag](../../../administration/feature_flags.md) named `security_policies_branch_exceptions`. Enabled by default. [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/133753) in GitLab 16.5. Feature flag removed.
> - The `any_merge_request` rule type was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/418752) in GitLab 16.4. Disabled by default.
> - The `any_merge_request` rule type was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/418752) in GitLab 16.4. Enabled by default. [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/136298) in GitLab 16.6.

FLAG:
On self-managed GitLab, by default the `any_merge_request` field is not available. To show the feature, an administrator can [enable the feature flag](../../../administration/feature_flags.md) named `any_merge_request`.
On self-managed GitLab, by default the `any_merge_request` field is available. To hide the feature, an administrator can [disable the feature flag](../../../administration/feature_flags.md) named `any_merge_request`.
On GitLab.com, this feature is available.

This rule enforces the defined actions for any merge request based on the commits signature.
@@ -179,24 +181,24 @@ the defined policy.

> - The `block_unprotecting_branches` field was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/423101) in GitLab 16.4 [with flag](../../../administration/feature_flags.md) named `scan_result_policy_settings`. Disabled by default.
> - The `scan_result_policy_settings` feature flag was replaced by the `scan_result_policies_block_unprotecting_branches` feature flag in 16.4.
> - The `prevent_approval_by_author`, `prevent_approval_by_commit_author`, `remove_approvals_with_new_commit`, and `require_password_to_approve` fields were [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/418752) in GitLab 16.4 [with flag](../../../administration/feature_flags.md) named `scan_result_any_merge_request`. Disabled by default.
> - The `prevent_force_pushing` field was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/420629) in GitLab 16.4 [with flag](../../../administration/feature_flags.md) named `scan_result_policies_block_force_push`. Disabled by default.
> - The `prevent_approval_by_author`, `prevent_approval_by_commit_author`, `remove_approvals_with_new_commit`, and `require_password_to_approve` fields were [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/418752) in GitLab 16.4 [with flag](../../../administration/feature_flags.md) named `scan_result_any_merge_request`. Enabled by default.
> - The `prevent_pushing_and_force_pushing` field was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/420629) in GitLab 16.4 [with flag](../../../administration/feature_flags.md) named `scan_result_policies_block_force_push`. Enabled by default.

FLAG:
On self-managed GitLab, by default the `block_unprotecting_branches` field is unavailable. To show the feature, an administrator can [enable the feature flag](../../../administration/feature_flags.md) named `scan_result_policies_block_unprotecting_branches`. On GitLab.com, this feature is unavailable.
On self-managed GitLab, by default the `prevent_approval_by_author`, `prevent_approval_by_commit_author`, `remove_approvals_with_new_commit`, and `require_password_to_approve` fields are unavailable. To show the feature, an administrator can [enable the feature flag](../../../administration/feature_flags.md) named `scan_result_any_merge_request`. On GitLab.com, this feature is available.
On self-managed GitLab, by default the `prevent_force_pushing` field is unavailable. To show the feature, an administrator can [enable the feature flag](../../../administration/feature_flags.md) named `security_policies_branch_exceptions`. On GitLab.com, this feature is unavailable.
On self-managed GitLab, by default the `prevent_approval_by_author`, `prevent_approval_by_commit_author`, `remove_approvals_with_new_commit`, and `require_password_to_approve` fields are available. To hide the feature, an administrator can [disable the feature flag](../../../administration/feature_flags.md) named `scan_result_any_merge_request`. On GitLab.com, this feature is available.
On self-managed GitLab, by default the `prevent_pushing_and_force_pushing` field is available. To hide the feature, an administrator can [disable the feature flag](../../../administration/feature_flags.md) named `scan_result_policies_block_force_push`. On GitLab.com, this feature is available.

The settings set in the policy overwrite settings in the project.

| Field | Type | Required | Possible values | Description |
|-------|------|----------|-----------------|-------------|
| Field | Type | Required | Possible values | Description |
|-------------------------------------|------|----------|-----------------|-------------|
| `block_unprotecting_branches` | `boolean` | false | `true`, `false` | Prevent a user from removing a branch from the protected branches list, deleting a protected branch, or changing the default branch if that branch is included in the security policy. |
| `prevent_approval_by_author` | `boolean` | false | `true`, `false` | When enabled, two person approval is required on all MRs as merge request authors cannot approve their own MRs and merge them unilaterally. |
| `prevent_approval_by_commit_author` | `boolean` | false | `true`, `false` | When enabled, users who have contributed code to the MR are ineligible for approval, ensuring code committers cannot introduce vulnerabilities and approve code to merge. |
| `remove_approvals_with_new_commit` | `boolean` | false | `true`, `false` | If an MR receives all necessary approvals to merge, but then a new commit is added, new approvals are required. This ensures new commits that may include vulnerabilities cannot be introduced. |
| `require_password_to_approve` | `boolean` | false | `true`, `false` | Password confirmation on approvals provides an additional level of security. Enabling this enforces the setting on all projects targeted by this policy. |
| `prevent_force_pushing` | `boolean` | false | `true`, `false` | Prevent pushing and force pushing to a protected branch. |
| `prevent_pushing_and_force_pushing` | `boolean` | false | `true`, `false` | Prevent pushing and force pushing to a protected branch. |

## Example security scan result policies project
@@ -3,7 +3,10 @@
module API
module Entities
class UserPreferences < Grape::Entity
expose :id, :user_id, :view_diffs_file_by_file, :show_whitespace_in_diffs, :pass_user_identities_to_ci_jwt
expose :id, :user_id, :view_diffs_file_by_file,
:show_whitespace_in_diffs, :pass_user_identities_to_ci_jwt
end
end
end

API::Entities::UserPreferences.prepend_mod_with('API::Entities::UserPreferences', with_descendants: true)
@@ -0,0 +1,17 @@
# frozen_string_literal: true

module API
module Helpers
module UserPreferencesHelpers
extend ActiveSupport::Concern
extend Grape::API::Helpers

def update_user_namespace_settings(attrs)
# This method will be redefined in EE.
attrs
end
end
end
end

API::Helpers::UserPreferencesHelpers.prepend_mod_with('API::Helpers::UserPreferencesHelpers')
@@ -32,15 +32,20 @@ module API
helpers Gitlab::Tracking::Helpers::WeakPasswordErrorEvent

helpers do
def custom_order_by_or_sort?
params[:order_by].present? || params[:sort].present?
end

# rubocop: disable CodeReuse/ActiveRecord
def reorder_users(users)
# Users#search orders by exact matches and handles pagination,
# so we should prioritize that.
if params[:search]
# so we should prioritize that, unless the user specifies some custom
# sort.
if params[:search] && !custom_order_by_or_sort?
users
else
# Note that params[:order_by] and params[:sort] will always be present and
# default to "id" and "desc" as defined in `sort_params`.
params[:order_by] ||= 'id'
params[:sort] ||= 'desc'
users.reorder(order_options_with_tie_breaker)
end
end
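An illustrative request pair for the behavior difference introduced here (host, token, and values are placeholders): a plain search keeps the relevance ordering from `Users#search`, while an explicit `order_by`/`sort` now takes precedence over it.

```shell
# Hypothetical examples against the users API:
curl --header "PRIVATE-TOKEN: <token>" "https://gitlab.example.com/api/v4/users?search=jane"
curl --header "PRIVATE-TOKEN: <token>" "https://gitlab.example.com/api/v4/users?search=jane&order_by=created_at&sort=asc"
```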
@@ -82,6 +87,17 @@ module API
optional :sort, type: String, values: %w[asc desc], default: 'desc',
desc: 'Return users sorted in ascending and descending order'
end

# Grape doesn't make it easy to tell whether a user supplied a
# value for optional parameters with defaults. Disable the
# defaults so that we can manually assign defaults if they are
# not provided.
params :sort_params_no_defaults do
optional :order_by, type: String, values: %w[id name username created_at updated_at],
desc: 'Return users ordered by a field'
optional :sort, type: String, values: %w[asc desc],
desc: 'Return users sorted in ascending and descending order'
end
end

desc 'Get the list of users' do
@@ -106,7 +122,7 @@ module API
optional :two_factor, type: String, desc: 'Filter users by Two-factor authentication.'
all_or_none_of :extern_uid, :provider

use :sort_params
use :sort_params_no_defaults
use :pagination
use :with_custom_attributes
use :optional_index_params_ee
@@ -1061,6 +1077,8 @@ module API
end
end

helpers Helpers::UserPreferencesHelpers

desc "Get the currently authenticated user's SSH keys" do
success Entities::SSHKey
end
@@ -1251,7 +1269,9 @@ module API
optional :view_diffs_file_by_file, type: Boolean, desc: 'Flag indicating the user sees only one file diff per page'
optional :show_whitespace_in_diffs, type: Boolean, desc: 'Flag indicating the user sees whitespace changes in diffs'
optional :pass_user_identities_to_ci_jwt, type: Boolean, desc: 'Flag indicating the user passes their external identities to a CI job as part of a JSON web token.'
at_least_one_of :view_diffs_file_by_file, :show_whitespace_in_diffs, :pass_user_identities_to_ci_jwt
optional :code_suggestions, type: Boolean, desc: 'Flag indicating the user allows code suggestions.' \
'Argument is experimental and can be removed in the future without notice.'
at_least_one_of :view_diffs_file_by_file, :show_whitespace_in_diffs, :pass_user_identities_to_ci_jwt, :code_suggestions
end
put "preferences", feature_category: :user_profile, urgency: :high do
authenticate!
@@ -1260,6 +1280,10 @@ module API

attrs = declared_params(include_missing: false)

attrs = update_user_namespace_settings(attrs)

render_api_error!('400 Bad Request', 400) unless attrs

service = ::UserPreferences::UpdateService.new(current_user, attrs).execute
if service.success?
present preferences, with: Entities::UserPreferences
@@ -17,26 +17,27 @@ module ClickHouse
@schema_migration = schema_migration
end

def up(target_version = nil, &block)
def up(target_version = nil, step = nil, &block)
selected_migrations = block ? migrations.select(&block) : migrations

migrate(:up, selected_migrations, target_version)
migrate(:up, selected_migrations, target_version, step)
end

def down(target_version = nil, &block)
def down(target_version = nil, step = 1, &block)
selected_migrations = block ? migrations.select(&block) : migrations

migrate(:down, selected_migrations, target_version)
migrate(:down, selected_migrations, target_version, step)
end

private

def migrate(direction, selected_migrations, target_version = nil)
def migrate(direction, selected_migrations, target_version = nil, step = nil)
ClickHouse::MigrationSupport::Migrator.new(
direction,
selected_migrations,
schema_migration,
target_version
target_version,
step
).migrate
end
@@ -11,9 +11,13 @@ module ClickHouse

self.migrations_paths = ["db/click_house/migrate"]

def initialize(direction, migrations, schema_migration, target_version = nil, logger = Gitlab::AppLogger)
def initialize(
direction, migrations, schema_migration, target_version = nil, step = nil,
logger = Gitlab::AppLogger
)
@direction = direction
@target_version = target_version
@step = step
@migrated_versions = {}
@migrations = migrations
@schema_migration = schema_migration
@@ -47,12 +51,15 @@ module ClickHouse
runnable = migrations[start..finish]

if up?
runnable.reject { |m| ran?(m) }
runnable = runnable.reject { |m| ran?(m) }
else
# skip the last migration if we're headed down, but not ALL the way down
runnable.pop if target
runnable.find_all { |m| ran?(m) }
runnable = runnable.find_all { |m| ran?(m) }
end

runnable = runnable.take(@step) if @step && !@target_version
runnable
end

def migrations
@@ -8,21 +8,6 @@ module Gitlab
def enabled?
::AuthHelper.saml_providers.any?
end

def default_attribute_statements
defaults = OmniAuth::Strategies::SAML.default_options[:attribute_statements].to_hash.deep_symbolize_keys
defaults[:nickname] = %w[username nickname]
defaults[:name] << 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name'
defaults[:name] << 'http://schemas.microsoft.com/ws/2008/06/identity/claims/name'
defaults[:email] << 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress'
defaults[:email] << 'http://schemas.microsoft.com/ws/2008/06/identity/claims/emailaddress'
defaults[:first_name] << 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname'
defaults[:first_name] << 'http://schemas.microsoft.com/ws/2008/06/identity/claims/givenname'
defaults[:last_name] << 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname'
defaults[:last_name] << 'http://schemas.microsoft.com/ws/2008/06/identity/claims/surname'

defaults
end
end

DEFAULT_PROVIDER_NAME = 'saml'
@@ -41,7 +41,11 @@ module Gitlab
encrypted.change do |contents|
contents = encrypted_file_template unless File.exist?(encrypted.content_path)
File.write(temp_file.path, contents)
system(ENV['EDITOR'], temp_file.path)

edit_success = system(*editor_args, temp_file.path)

raise "Unable to run $EDITOR: #{editor_args}" unless edit_success

changes = File.read(temp_file.path)
contents_changed = contents != changes
validate_contents(changes)
@@ -99,6 +103,10 @@ module Gitlab
def encrypted_file_template
raise NotImplementedError
end

def editor_args
ENV['EDITOR']&.split
end
end
end
end
end
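A sketch of what the `editor_args` change allows: an `$EDITOR` value that includes flags is now split into separate argv words instead of being treated as a single executable name. The editor shown is illustrative, not mandated by this change:

```shell
# Hypothetical example: a multi-word EDITOR now works for the encrypted-file
# editing commands that go through this code path.
export EDITOR="code --wait"
```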
@@ -29,8 +29,6 @@ module Gitlab
{
authorize_params: { gl_auth_type: 'login' }
}
when ->(provider_name) { AuthHelper.saml_providers.include?(provider_name.to_sym) }
{ attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements }
else
{}
end
@@ -63,7 +61,7 @@ module Gitlab
provider_arguments.concat arguments
provider_arguments << defaults unless defaults.empty?
when Hash, GitlabSettings::Options
hash_arguments = merge_hash_defaults_and_args(defaults, arguments)
hash_arguments = arguments.deep_symbolize_keys.deep_merge(defaults)
normalized = normalize_hash_arguments(hash_arguments)

# A Hash from the configuration will be passed as is.
@@ -82,13 +80,6 @@ module Gitlab
provider_arguments
end

def merge_hash_defaults_and_args(defaults, arguments)
return arguments.to_hash if defaults.empty?
return defaults.deep_merge(arguments.deep_symbolize_keys) if Feature.enabled?(:invert_omniauth_args_merging)

arguments.to_hash.deep_symbolize_keys.deep_merge(defaults)
end

def normalize_hash_arguments(args)
args.deep_symbolize_keys!
@@ -0,0 +1,17 @@
# frozen_string_literal: true

module Gitlab
module Usage
module Metrics
module Instrumentations
class BulkImportsUsersMetric < DatabaseMetric
operation :distinct_count, column: :user_id

relation do
::BulkImport
end
end
end
end
end
end
@@ -0,0 +1,17 @@
# frozen_string_literal: true

module Gitlab
module Usage
module Metrics
module Instrumentations
class CsvImportsUsersMetric < DatabaseMetric
operation :distinct_count, column: :user_id

relation do
::Issues::CsvImport
end
end
end
end
end
end
@@ -0,0 +1,17 @@
# frozen_string_literal: true

module Gitlab
module Usage
module Metrics
module Instrumentations
class GroupImportsUsersMetric < DatabaseMetric
operation :distinct_count, column: :user_id

relation do
::GroupImportState
end
end
end
end
end
end
@@ -0,0 +1,17 @@
# frozen_string_literal: true

module Gitlab
module Usage
module Metrics
module Instrumentations
class JiraImportsUsersMetric < DatabaseMetric
operation :distinct_count, column: :user_id

relation do
::JiraImportState
end
end
end
end
end
end
@@ -0,0 +1,17 @@
# frozen_string_literal: true

module Gitlab
module Usage
module Metrics
module Instrumentations
class ProjectImportsCreatorsMetric < DatabaseMetric
operation :distinct_count, column: :creator_id

relation do
::Project.where.not(import_type: nil)
end
end
end
end
end
end
@@ -0,0 +1,34 @@
# frozen_string_literal: true

module Gitlab
module Usage
module Metrics
module Instrumentations
class UniqueUsersAllImportsMetric < NumbersMetric
IMPORTS_METRICS = [
ProjectImportsCreatorsMetric,
BulkImportsUsersMetric,
JiraImportsUsersMetric,
CsvImportsUsersMetric,
GroupImportsUsersMetric
].freeze

operation :add

data do |time_frame|
IMPORTS_METRICS.map { |metric| metric.new(time_frame: time_frame).value }
end

# overwriting instrumentation to generate the appropriate sql query
def instrumentation
metric_queries = IMPORTS_METRICS.map do |metric|
"(#{metric.new(time_frame: time_frame).instrumentation})"
end.join(' + ')

"SELECT #{metric_queries}"
end
end
end
end
end
end
@@ -365,7 +365,6 @@ module Gitlab
users_created: count(::User.where(time_period), start: minimum_id(User), finish: maximum_id(User)),
omniauth_providers: filtered_omniauth_provider_names.reject { |name| name == 'group_saml' },
user_auth_by_provider: distinct_count_user_auth_by_provider(time_period),
unique_users_all_imports: unique_users_all_imports(time_period),
bulk_imports: {
gitlab_v1: count(::BulkImport.where(**time_period, source_type: :gitlab))
},
@@ -564,18 +563,6 @@ module Gitlab
}
end

# rubocop:disable CodeReuse/ActiveRecord
def unique_users_all_imports(time_period)
project_imports = distinct_count(::Project.where(time_period).where.not(import_type: nil), :creator_id)
bulk_imports = distinct_count(::BulkImport.where(time_period), :user_id)
jira_issue_imports = distinct_count(::JiraImportState.where(time_period), :user_id)
csv_issue_imports = distinct_count(::Issues::CsvImport.where(time_period), :user_id)
group_imports = distinct_count(::GroupImportState.where(time_period), :user_id)

add(project_imports, bulk_imports, jira_issue_imports, csv_issue_imports, group_imports)
end
# rubocop:enable CodeReuse/ActiveRecord

# rubocop:disable CodeReuse/ActiveRecord
def distinct_count_user_auth_by_provider(time_period)
counts = auth_providers_except_ldap.index_with do |provider|
@@ -8,12 +8,12 @@ namespace :gitlab do
ClickHouse::MigrationSupport::SchemaMigration.create_table(args.database&.to_sym || :main)
end

desc 'GitLab | ClickHouse | Migrate'
desc 'GitLab | ClickHouse | Migrate the database (options: VERSION=x, VERBOSE=false, SCOPE=y)'
task migrate: [:prepare_schema_migration_table] do
migrate(:up)
end

desc 'GitLab | ClickHouse | Rollback'
desc 'GitLab | ClickHouse | Rolls the schema back to the previous version (specify steps w/ STEP=n)'
task rollback: [:prepare_schema_migration_table] do
migrate(:down)
end
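A usage sketch for the updated task descriptions; the exact task path is an assumption inferred from the surrounding `namespace :gitlab do` block, and the VERSION value is a placeholder:

```shell
# Hypothetical examples: migrate to a target version, then roll back two steps.
bundle exec rake gitlab:clickhouse:migrate VERSION=20231114000000 VERBOSE=false
bundle exec rake gitlab:clickhouse:rollback STEP=2
```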
@@ -42,13 +42,17 @@ namespace :gitlab do
check_target_version

scope = ENV['SCOPE']
step = ENV['STEP'] ? Integer(ENV['STEP']) : nil
step = 1 if step.nil? && direction == :down
raise ArgumentError, 'STEP should be a positive number' if step.present? && step < 1

verbose_was = ClickHouse::Migration.verbose
ClickHouse::Migration.verbose = ENV['VERBOSE'] ? ENV['VERBOSE'] != 'false' : true

migrations_paths = ClickHouse::MigrationSupport::Migrator.migrations_paths
schema_migration = ClickHouse::MigrationSupport::SchemaMigration
migration_context = ClickHouse::MigrationSupport::MigrationContext.new(migrations_paths, schema_migration)
migrations_ran = migration_context.public_send(direction, target_version) do |migration|
migrations_ran = migration_context.public_send(direction, target_version, step) do |migration|
scope.blank? || scope == migration.scope
end
@@ -223,12 +223,12 @@ namespace :gitlab do
# :nocov:
end

# During testing, db:test:load restores the database schema from scratch
# During testing, db:test:load_schema restores the database schema from scratch
# which does not include dynamic partitions. We cannot rely on application
# initializers here as the application can continue to run while
# a rake task reloads the database schema.
Rake::Task['db:test:load'].enhance do
# Due to bug in `db:test:load` if many DBs are used
Rake::Task['db:test:load_schema'].enhance do
# Due to bug in `db:test:load_schema` if many DBs are used
# the `ActiveRecord::Base.connection` might be switched to another one
# This is due to `if should_reconnect`:
# https://github.com/rails/rails/blob/a81aeb63a007ede2fe606c50539417dada9030c7/activerecord/lib/active_record/railties/databases.rake#L622
@@ -25350,6 +25350,9 @@ msgstr ""
msgid "Inform users without uploaded SSH keys that they can't push over SSH until one is added"
msgstr ""

msgid "Information"
msgstr ""

msgid "Infrastructure"
msgstr ""
@@ -42959,6 +42962,9 @@ msgstr ""
msgid "SecurityOrchestration|.yaml preview"
msgstr ""

msgid "SecurityOrchestration|A compliance framework is a label to identify that your project has certain compliance requirements. %{linkStart}Learn more%{linkEnd}."
msgstr ""

msgid "SecurityOrchestration|Actions"
msgstr ""
@@ -43007,6 +43013,9 @@ msgstr ""
msgid "SecurityOrchestration|Are you sure you want to delete this policy? This action cannot be undone."
msgstr ""

msgid "SecurityOrchestration|At least one framework label should be selected"
msgstr ""

msgid "SecurityOrchestration|Automatically selected runners"
msgstr ""
@@ -50604,9 +50613,6 @@ msgstr ""
msgid "Tracing|Attributes"
msgstr ""

msgid "Tracing|Check again"
msgstr ""

msgid "Tracing|Date"
msgstr ""
@@ -50664,10 +50670,10 @@ msgstr ""
msgid "Tracing|Metadata"
msgstr ""

msgid "Tracing|No traces to display."
msgid "Tracing|Operation"
msgstr ""

msgid "Tracing|Operation"
msgid "Tracing|Refresh the page, or edit your search filter and try again"
msgstr ""

msgid "Tracing|Resource attributes"
@@ -10,6 +10,10 @@

set -o errexit # AKA -e - exit immediately on errors (http://mywiki.wooledge.org/BashFAQ/105)

###########
## Setup ##
###########

# https://stackoverflow.com/a/28938235
BCyan='\033[1;36m' # Bold Cyan
BRed='\033[1;31m' # Bold Red
@@ -19,25 +23,67 @@ Color_Off='\033[0m' # Text Reset

function onexit_err() {
local exit_status=${1:-$?}
printf "\n❌❌❌ ${BRed}Remote Development specs failed!${Color_Off} ❌❌❌\n"
printf "\n❌❌❌ ${BRed}Remote Development smoke test failed!${Color_Off} ❌❌❌\n"

if [ ${REVEAL_RUBOCOP_TODO} -ne 0 ]; then
printf "\n(If the failure was due to rubocop, set REVEAL_RUBOCOP_TODO=0 to ignore TODOs)\n"
fi

exit "${exit_status}"
}
trap onexit_err ERR
set -o errexit

printf "${BCyan}"
printf "\nStarting Remote Development specs.\n\n"
printf "${Color_Off}"
#####################
## Invoke commands ##
#####################

printf "${BBlue}Running Remote Development backend specs${Color_Off}\n\n"
printf "${BCyan}\nStarting Remote Development smoke test...\n\n${Color_Off}"

bin/rspec -r spec_helper \
$(find . -path './**/remote_development/*_spec.rb' | grep -v 'qa/qa') \
ee/spec/graphql/types/query_type_spec.rb \
ee/spec/graphql/types/subscription_type_spec.rb \
ee/spec/requests/api/internal/kubernetes_spec.rb \
spec/graphql/types/subscription_type_spec.rb \
spec/lib/result_spec.rb \
spec/support_specs/matchers/result_matchers_spec.rb
#############
## RUBOCOP ##
#############

printf "${BBlue}Running RuboCop for Remote Development and related files${Color_Off}\n\n"

# TODO: Also run rubocop for the other non-remote-development files once they are passing rubocop
# with REVEAL_RUBOCOP_TODO=1
while IFS= read -r -d '' file; do
files_for_rubocop+=("$file")
done < <(find . -path './**/remote_development/*.rb' -print0)

REVEAL_RUBOCOP_TODO=${REVEAL_RUBOCOP_TODO:-1} bundle exec rubocop --parallel --force-exclusion --no-server "${files_for_rubocop[@]}"

###########
## RSPEC ##
###########

printf "\n\n${BBlue}Running Remote Development and related backend RSpec specs${Color_Off}\n\n"

while IFS= read -r file; do
files_for_rspec+=("$file")
done < <(find . -path './**/remote_development/*_spec.rb' | grep -v 'qa/qa')

files_for_rspec+=(
"ee/spec/graphql/types/query_type_spec.rb"
"ee/spec/graphql/types/subscription_type_spec.rb"
"ee/spec/requests/api/internal/kubernetes_spec.rb"
"spec/graphql/types/subscription_type_spec.rb"
"spec/lib/result_spec.rb"
"spec/support_specs/matchers/result_matchers_spec.rb"
)
bin/rspec -r spec_helper "${files_for_rspec[@]}"

##########
## JEST ##
##########

printf "\n\n${BBlue}Running Remote Development frontend Jest specs${Color_Off}\n\n"

yarn jest ee/spec/frontend/remote_development

###########################
## Print success message ##
###########################

printf "\n✅✅✅ ${BGreen}All Remote Development specs passed successfully!${Color_Off} ✅✅✅\n"
@@ -23,7 +23,7 @@ RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
end

describe 'performs migrations' do
subject(:migration) { migrate(target_version, migration_context) }
subject(:migration) { migrate(migration_context, target_version) }

describe 'when creating a table' do
let(:migrations_dirname) { 'plain_table_creation' }
@@ -45,7 +45,7 @@ RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
let(:target_version) { 2 }

it 'drops table' do
migrate(1, migration_context)
migrate(migration_context, 1)
expect(table_names).to include('some')

migration
@@ -120,7 +120,7 @@ RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
end

it 'registers migrations on respective database', :aggregate_failures do
expect { migrate(2, migration_context) }
expect { migrate(migration_context, 2) }
.to change { active_schema_migrations_count(*main_db_config) }.from(0).to(1)
.and change { active_schema_migrations_count(*another_db_config) }.from(0).to(1)
@@ -137,7 +137,7 @@ RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
date: a_hash_including(type: 'Date')
})

expect { migrate(nil, migration_context) }
expect { migrate(migration_context, nil) }
.to change { active_schema_migrations_count(*main_db_config) }.to(2)
.and not_change { active_schema_migrations_count(*another_db_config) }
@@ -187,38 +187,63 @@ RSpec.describe ClickHouse::MigrationSupport::MigrationContext,
end

describe 'performs rollbacks' do
subject(:migration) { rollback(target_version, migration_context) }
subject(:migration) { rollback(migration_context, target_version) }

before do
migrate(nil, migration_context)
# Ensure that all migrations are up
migrate(migration_context, nil)
end

context 'when migrating back all the way to 0' do
let(:target_version) { 0 }
context 'when down method is present' do
let(:migrations_dirname) { 'table_creation_with_down_method' }

context 'when down method is present' do
let(:migrations_dirname) { 'table_creation_with_down_method' }
context 'when specifying target_version' do
it 'removes migrations and performs down method' do
expect(table_names).to include('some', 'another')

it 'removes migration and performs down method' do
# test that target_version is prioritized over step
expect { rollback(migration_context, 1, 10000) }.to change { active_schema_migrations_count }.from(2).to(1)
expect(table_names).not_to include('another')
expect(table_names).to include('some')
expect(schema_migrations).to contain_exactly(
a_hash_including(version: '1', active: 1),
a_hash_including(version: '2', active: 0)
)

expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
expect { rollback(migration_context, nil) }.to change { active_schema_migrations_count }.to(0)
expect(table_names).not_to include('some', 'another')

expect(table_names).not_to include('some')
expect(schema_migrations).to contain_exactly(a_hash_including(version: '1', active: 0))
expect(schema_migrations).to contain_exactly(
a_hash_including(version: '1', active: 0),
a_hash_including(version: '2', active: 0)
)
end
end

context 'when down method is missing' do
let(:migrations_dirname) { 'plain_table_creation' }
context 'when specifying step' do
it 'removes migrations and performs down method' do
expect(table_names).to include('some', 'another')

it 'removes migration ignoring missing down method' do
expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
.and not_change { table_names & %w[some] }.from(%w[some])
expect { rollback(migration_context, nil, 1) }.to change { active_schema_migrations_count }.from(2).to(1)
expect(table_names).not_to include('another')
expect(table_names).to include('some')

expect { rollback(migration_context, nil, 2) }.to change { active_schema_migrations_count }.to(0)
expect(table_names).not_to include('some', 'another')
end
end
end

context 'when down method is missing' do
let(:migrations_dirname) { 'plain_table_creation' }
let(:target_version) { 0 }

it 'removes migration ignoring missing down method' do
expect { migration }.to change { active_schema_migrations_count }.from(1).to(0)
.and not_change { table_names & %w[some] }.from(%w[some])
end
end

context 'when target_version is incorrect' do
let(:target_version) { -1 }
let(:migrations_dirname) { 'plain_table_creation' }
@@ -251,7 +251,7 @@ RSpec.describe GroupsController, factory_default: :keep, feature_category: :code
post :create, params: { group: { parent_id: group.id, path: 'subgroup' } }

expect(response).to be_redirect
expect(response.body).to match(%r{http://test.host/#{group.path}/subgroup})
expect(response.location).to eq("http://test.host/#{group.path}/subgroup")
end
end
spec/fixtures/click_house/migrations/table_creation_with_down_method/2_create_another_table.rb
@@ -0,0 +1,20 @@
# frozen_string_literal: true

# rubocop: disable Gitlab/NamespacedClass -- Fixtures do not need to be namespaced
class CreateAnotherTable < ClickHouse::Migration
def up
execute <<~SQL
CREATE TABLE another (
id UInt64,
date Date
) ENGINE = Memory
SQL
end

def down
execute <<~SQL
DROP TABLE another
SQL
end
end
# rubocop: enable Gitlab/NamespacedClass
@@ -19,41 +19,6 @@ RSpec.describe Gitlab::Auth::Saml::Config do
end
end

describe '.default_attribute_statements' do
it 'includes upstream defaults, nickname and Microsoft values' do
expect(described_class.default_attribute_statements).to match_array(
{
nickname: %w[username nickname],
name: [
'name',
'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name',
'http://schemas.microsoft.com/ws/2008/06/identity/claims/name'
],
email: [
'email',
'mail',
'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress',
'http://schemas.microsoft.com/ws/2008/06/identity/claims/emailaddress'
],
first_name: [
'first_name',
'firstname',
'firstName',
'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname',
'http://schemas.microsoft.com/ws/2008/06/identity/claims/givenname'
],
last_name: [
'last_name',
'lastname',
'lastName',
'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname',
'http://schemas.microsoft.com/ws/2008/06/identity/claims/surname'
]
}
)
end
end

describe '#external_groups' do
let(:config_1) { described_class.new('saml1') }
@@ -2,9 +2,7 @@

require 'spec_helper'

RSpec.describe Gitlab::OmniauthInitializer, feature_category: :system_access do
include LoginHelpers

RSpec.describe Gitlab::OmniauthInitializer do
let(:devise_config) { class_double(Devise) }

subject(:initializer) { described_class.new(devise_config) }
@@ -226,119 +224,6 @@ RSpec.describe Gitlab::OmniauthInitializer, feature_category: :system_access do
subject.execute([shibboleth_config])
end

context 'when SAML providers are configured' do
it 'configures default args for a single SAML provider' do
stub_omniauth_config(providers: [{ name: 'saml', args: { idp_sso_service_url: 'https://saml.example.com' } }])

expect(devise_config).to receive(:omniauth).with(
:saml,
{
idp_sso_service_url: 'https://saml.example.com',
attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
}
)

initializer.execute(Gitlab.config.omniauth.providers)
end

context 'when configuration provides matching keys' do
before do
stub_omniauth_config(
providers: [
{
name: 'saml',
args: { idp_sso_service_url: 'https://saml.example.com', attribute_statements: { email: ['custom_attr'] } }
}
]
)
end

it 'merges arguments with user configuration preference' do
expect(devise_config).to receive(:omniauth).with(
:saml,
{
idp_sso_service_url: 'https://saml.example.com',
attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
.merge({ email: ['custom_attr'] })
}
)

initializer.execute(Gitlab.config.omniauth.providers)
end

it 'merges arguments with defaults preference when invert_omniauth_args_merging is not enabled' do
stub_feature_flags(invert_omniauth_args_merging: false)

expect(devise_config).to receive(:omniauth).with(
:saml,
{
idp_sso_service_url: 'https://saml.example.com',
attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
}
)

initializer.execute(Gitlab.config.omniauth.providers)
end
end

it 'configures defaults args for multiple SAML providers' do
stub_omniauth_config(
providers: [
{ name: 'saml', args: { idp_sso_service_url: 'https://saml.example.com' } },
{
name: 'saml2',
args: { strategy_class: 'OmniAuth::Strategies::SAML', idp_sso_service_url: 'https://saml2.example.com' }
}
]
)

expect(devise_config).to receive(:omniauth).with(
:saml,
{
idp_sso_service_url: 'https://saml.example.com',
attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
}
)
expect(devise_config).to receive(:omniauth).with(
:saml2,
{
idp_sso_service_url: 'https://saml2.example.com',
strategy_class: OmniAuth::Strategies::SAML,
attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
}
)

initializer.execute(Gitlab.config.omniauth.providers)
end

it 'merges arguments with user configuration preference for custom SAML provider' do
stub_omniauth_config(
providers: [
{
name: 'custom_saml',
args: {
strategy_class: 'OmniAuth::Strategies::SAML',
idp_sso_service_url: 'https://saml2.example.com',
attribute_statements: { email: ['custom_attr'] }
}
}
]
)

expect(devise_config).to receive(:omniauth).with(
:custom_saml,
{
idp_sso_service_url: 'https://saml2.example.com',
strategy_class: OmniAuth::Strategies::SAML,
attribute_statements: ::Gitlab::Auth::Saml::Config.default_attribute_statements
.merge({ email: ['custom_attr'] })
}
)

initializer.execute(Gitlab.config.omniauth.providers)
end
end

it 'configures defaults for google_oauth2' do
google_config = {
'name' => 'google_oauth2',
@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Instrumentations::BulkImportsUsersMetric, feature_category: :importers do
let(:expected_value) { 3 }
let(:expected_query) { "SELECT COUNT(DISTINCT \"bulk_imports\".\"user_id\") FROM \"bulk_imports\"" }

before_all do
import = create :bulk_import, created_at: 3.days.ago
create :bulk_import, created_at: 35.days.ago
create :bulk_import, created_at: 3.days.ago
create :bulk_import, created_at: 3.days.ago, user: import.user
end

it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }

it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
let(:expected_value) { 2 }
let(:start) { 30.days.ago.to_fs(:db) }
let(:finish) { 2.days.ago.to_fs(:db) }
let(:expected_query) do
"SELECT COUNT(DISTINCT \"bulk_imports\".\"user_id\") FROM \"bulk_imports\" " \
"WHERE \"bulk_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
end
end
end
@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CsvImportsUsersMetric, feature_category: :importers do
let(:expected_value) { 3 }
let(:expected_query) { "SELECT COUNT(DISTINCT \"csv_issue_imports\".\"user_id\") FROM \"csv_issue_imports\"" }

before_all do
import = create :issue_csv_import, created_at: 3.days.ago
create :issue_csv_import, created_at: 35.days.ago
create :issue_csv_import, created_at: 3.days.ago
create :issue_csv_import, created_at: 3.days.ago, user: import.user
end

it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }

it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
let(:expected_value) { 2 }
let(:start) { 30.days.ago.to_fs(:db) }
let(:finish) { 2.days.ago.to_fs(:db) }
let(:expected_query) do
"SELECT COUNT(DISTINCT \"csv_issue_imports\".\"user_id\") FROM \"csv_issue_imports\" " \
"WHERE \"csv_issue_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
end
end
end
@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GroupImportsUsersMetric, feature_category: :importers do
let(:expected_value) { 3 }
let(:expected_query) { "SELECT COUNT(DISTINCT \"group_import_states\".\"user_id\") FROM \"group_import_states\"" }

before_all do
import = create :group_import_state, created_at: 3.days.ago
create :group_import_state, created_at: 35.days.ago
create :group_import_state, created_at: 3.days.ago
create :group_import_state, created_at: 3.days.ago, user: import.user
end

it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }

it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
let(:expected_value) { 2 }
let(:start) { 30.days.ago.to_fs(:db) }
let(:finish) { 2.days.ago.to_fs(:db) }
let(:expected_query) do
"SELECT COUNT(DISTINCT \"group_import_states\".\"user_id\") FROM \"group_import_states\" " \
"WHERE \"group_import_states\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
end
end
end
@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Instrumentations::JiraImportsUsersMetric, feature_category: :importers do
let(:expected_value) { 3 }
let(:expected_query) { "SELECT COUNT(DISTINCT \"jira_imports\".\"user_id\") FROM \"jira_imports\"" }

before_all do
import = create :jira_import_state, created_at: 3.days.ago
create :jira_import_state, created_at: 35.days.ago
create :jira_import_state, created_at: 3.days.ago
create :jira_import_state, created_at: 3.days.ago, user: import.user
end

it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }

it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
let(:expected_value) { 2 }
let(:start) { 30.days.ago.to_fs(:db) }
let(:finish) { 2.days.ago.to_fs(:db) }
let(:expected_query) do
"SELECT COUNT(DISTINCT \"jira_imports\".\"user_id\") FROM \"jira_imports\" " \
"WHERE \"jira_imports\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
end
end
end
@@ -0,0 +1,31 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ProjectImportsCreatorsMetric, feature_category: :importers do
let(:expected_value) { 3 }
let(:expected_query) do
"SELECT COUNT(DISTINCT \"projects\".\"creator_id\") FROM \"projects\" " \
"WHERE \"projects\".\"import_type\" IS NOT NULL"
end

before_all do
project = create :project, import_type: :jira, created_at: 3.days.ago
create :project, import_type: :jira, created_at: 35.days.ago
create :project, import_type: :jira, created_at: 3.days.ago
create :project, created_at: 3.days.ago
create :project, import_type: :jira, created_at: 3.days.ago, creator: project.creator
end

it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }

it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
let(:expected_value) { 2 }
let(:start) { 30.days.ago.to_fs(:db) }
let(:finish) { 2.days.ago.to_fs(:db) }
let(:expected_query) do
"SELECT COUNT(DISTINCT \"projects\".\"creator_id\") FROM \"projects\" WHERE " \
"\"projects\".\"import_type\" IS NOT NULL AND \"projects\".\"created_at\" BETWEEN '#{start}' AND '#{finish}'"
end
end
end

@@ -0,0 +1,53 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Instrumentations::UniqueUsersAllImportsMetric, feature_category: :importers do
  let(:expected_value) { 6 }
  let(:expected_query) do
    <<~SQL.squish
      SELECT
      (SELECT COUNT(DISTINCT "projects"."creator_id") FROM "projects" WHERE "projects"."import_type" IS NOT NULL) +
      (SELECT COUNT(DISTINCT "bulk_imports"."user_id") FROM "bulk_imports") +
      (SELECT COUNT(DISTINCT "jira_imports"."user_id") FROM "jira_imports") +
      (SELECT COUNT(DISTINCT "csv_issue_imports"."user_id") FROM "csv_issue_imports") +
      (SELECT COUNT(DISTINCT "group_import_states"."user_id") FROM "group_import_states")
    SQL
  end

  before_all do
    import = create :jira_import_state, created_at: 3.days.ago
    create :jira_import_state, created_at: 35.days.ago
    create :jira_import_state, created_at: 3.days.ago, user: import.user

    create :group_import_state, created_at: 3.days.ago
    create :issue_csv_import, created_at: 3.days.ago
    create :bulk_import, created_at: 3.days.ago
    create :project, import_type: :jira, created_at: 3.days.ago
  end

  before do
    described_class::IMPORTS_METRICS.each do |submetric_class|
      metric = submetric_class.new(time_frame: time_frame, options: options)
      allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false)
    end
  end

  it_behaves_like 'a correct instrumented metric value and query', { time_frame: 'all' }

  it_behaves_like 'a correct instrumented metric value and query', { time_frame: '28d' } do
    let(:expected_value) { 5 }
    let(:start) { 30.days.ago.to_fs(:db) }
    let(:finish) { 2.days.ago.to_fs(:db) }
    let(:expected_query) do
      <<~SQL.squish
        SELECT
        (SELECT COUNT(DISTINCT "projects"."creator_id") FROM "projects" WHERE "projects"."import_type" IS NOT NULL AND "projects"."created_at" BETWEEN '#{start}' AND '#{finish}') +
        (SELECT COUNT(DISTINCT "bulk_imports"."user_id") FROM "bulk_imports" WHERE "bulk_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
        (SELECT COUNT(DISTINCT "jira_imports"."user_id") FROM "jira_imports" WHERE "jira_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
        (SELECT COUNT(DISTINCT "csv_issue_imports"."user_id") FROM "csv_issue_imports" WHERE "csv_issue_imports"."created_at" BETWEEN '#{start}' AND '#{finish}') +
        (SELECT COUNT(DISTINCT "group_import_states"."user_id") FROM "group_import_states" WHERE "group_import_states"."created_at" BETWEEN '#{start}' AND '#{finish}')
      SQL
    end
  end
end
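
The expected query above is simply the DISTINCT-user counts of the individual import tables added together at the SQL level. As a rough, stand-alone illustration of that composition (the class and method names here are assumptions for the sketch, not GitLab's actual instrumentation classes):

# Illustrative sketch: building one statement whose result is the sum of
# several per-table DISTINCT counts, mirroring expected_query above.
class CombinedImportUsersQuery
  SUB_QUERIES = [
    'SELECT COUNT(DISTINCT "projects"."creator_id") FROM "projects" WHERE "projects"."import_type" IS NOT NULL',
    'SELECT COUNT(DISTINCT "bulk_imports"."user_id") FROM "bulk_imports"',
    'SELECT COUNT(DISTINCT "jira_imports"."user_id") FROM "jira_imports"',
    'SELECT COUNT(DISTINCT "csv_issue_imports"."user_id") FROM "csv_issue_imports"',
    'SELECT COUNT(DISTINCT "group_import_states"."user_id") FROM "group_import_states"'
  ].freeze

  # Produces: SELECT (q1) + (q2) + ... so the database returns one summed value.
  def to_sql
    "SELECT #{SUB_QUERIES.map { |q| "(#{q})" }.join(' + ')}"
  end
end

puts CombinedImportUsersQuery.new.to_sql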

@@ -241,29 +241,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
    )
  end

  it 'includes import gmau usage data' do
    for_defined_days_back do
      user = create(:user)
      group = create(:group)

      group.add_owner(user)

      create(:project, import_type: :github, creator_id: user.id)
      create(:jira_import_state, :finished, project: create(:project, creator_id: user.id))
      create(:issue_csv_import, user: user)
      create(:group_import_state, group: group, user: user)
      create(:bulk_import, user: user)
    end

    expect(described_class.usage_activity_by_stage_manage({})).to include(
      unique_users_all_imports: 10
    )

    expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include(
      unique_users_all_imports: 5
    )
  end

  it 'includes imports usage data', :clean_gitlab_redis_cache do
    for_defined_days_back do
      user = create(:user)

@@ -155,6 +155,8 @@ RSpec.describe AwardEmoji, feature_category: :team_planning do
  end

  it 'broadcasts updates on the note when destroyed' do
    award_emoji.save!

    expect(note).to receive(:broadcast_noteable_notes_changed)
    expect(note).to receive(:trigger_note_subscription_update)

@@ -185,6 +187,8 @@ RSpec.describe AwardEmoji, feature_category: :team_planning do
  end

  it 'does not broadcast updates on the issue when destroyed' do
    award_emoji.save!

    expect(issue).not_to receive(:broadcast_noteable_notes_changed)
    expect(issue).not_to receive(:trigger_note_subscription_update)

@@ -127,7 +127,7 @@ RSpec.describe SystemHook, feature_category: :webhooks do
  end

  it 'group destroy hook' do
    group.destroy!
    create(:group).destroy!

    expect(WebMock).to have_requested(:post, system_hook.url).with(
      body: /group_destroy/,

@@ -34,7 +34,12 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
  end

  context 'with duplicate user and project authorization' do
    subject { project_auth.dup }
    subject do
      project_auth.dup.tap do |auth|
        auth.project = project_1
        auth.user = user
      end
    end

    it { is_expected.to be_invalid }

@@ -52,6 +57,8 @@ RSpec.describe ProjectAuthorization, feature_category: :groups_and_projects do
  context 'with multiple access levels for the same user and project' do
    subject do
      project_auth.dup.tap do |auth|
        auth.project = project_1
        auth.user = user
        auth.access_level = Gitlab::Access::MAINTAINER
      end
    end

@@ -238,6 +238,22 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
      expect(json_response.first['username']).to eq('a-user')
      expect(json_response.second['username']).to eq('a-user2')
    end

    it 'preserves requested ordering with order_by and sort' do
      get api(path, user, admin_mode: true), params: { search: first_user.username, order_by: 'name', sort: 'desc' }

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response.first['username']).to eq('a-user2')
      expect(json_response.second['username']).to eq('a-user')
    end

    it 'preserves requested ordering with sort' do
      get api(path, user, admin_mode: true), params: { search: first_user.username, sort: 'desc' }

      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response.first['username']).to eq('a-user2')
      expect(json_response.second['username']).to eq('a-user')
    end
  end

  context 'N+1 queries' do

@@ -2950,6 +2966,39 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
    end
  end

  describe "PUT /user/preferences" do
    let(:path) { '/user/preferences' }

    context "when unauthenticated" do
      it "returns authentication error" do
        put api(path)
        expect(response).to have_gitlab_http_status(:unauthorized)
      end
    end

    context "when authenticated" do
      it "updates user preferences" do
        user.user_preference.view_diffs_file_by_file = false
        user.user_preference.show_whitespace_in_diffs = true
        user.save!

        put api(path, user), params: {
          view_diffs_file_by_file: true,
          show_whitespace_in_diffs: false
        }

        expect(response).to have_gitlab_http_status(:ok)
        expect(json_response["view_diffs_file_by_file"]).to eq(true)
        expect(json_response["show_whitespace_in_diffs"]).to eq(false)

        user.reload

        expect(json_response["view_diffs_file_by_file"]).to eq(user.view_diffs_file_by_file)
        expect(json_response["show_whitespace_in_diffs"]).to eq(user.show_whitespace_in_diffs)
      end
    end
  end

  describe "GET /user/keys" do
    subject(:request) { get api(path, user) }

@@ -29,7 +29,7 @@ class ClickHouseTestRunner
    migrations_paths = ClickHouse::MigrationSupport::Migrator.migrations_paths
    schema_migration = ClickHouse::MigrationSupport::SchemaMigration
    migration_context = ClickHouse::MigrationSupport::MigrationContext.new(migrations_paths, schema_migration)
    migrate(nil, migration_context)
    migrate(migration_context, nil)

    @ensure_schema = true
  end

@@ -1,12 +1,12 @@
# frozen_string_literal: true

module ClickHouseTestHelpers
  def migrate(target_version, migration_context)
    quietly { migration_context.up(target_version) }
  def migrate(migration_context, target_version, step = nil)
    quietly { migration_context.up(target_version, step) }
  end

  def rollback(target_version, migration_context)
    quietly { migration_context.down(target_version) }
  def rollback(migration_context, target_version, step = 1)
    quietly { migration_context.down(target_version, step) }
  end

  def table_names(database = :main, configuration = ClickHouse::Client.configuration)
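
The helpers now take the migration context first and accept an optional step count. A minimal call-site sketch under the assumption that it runs inside this spec suite, using the `migrations_dir` fixture from the rake specs further down (illustrative only, not code from this commit):

# Hypothetical usage of the reordered helper signatures.
schema_migration = ClickHouse::MigrationSupport::SchemaMigration
context = ClickHouse::MigrationSupport::MigrationContext.new(migrations_dir, schema_migration)

migrate(context, nil)     # run every pending migration (no target version)
migrate(context, nil, 1)  # intended to run only the next migration step
rollback(context, nil)    # step back a single migration (step defaults to 1)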

@@ -64,7 +64,11 @@ module EmailHelpers

  def not_enqueue_mail_with(mailer_class, mail_method_name, *args)
    args.map! { |arg| arg.is_a?(ActiveRecord::Base) ? arg.id : arg }
    not_enqueue_mail(mailer_class, mail_method_name).with(*args)

    matcher = have_enqueued_mail(mailer_class, mail_method_name).with(*args)
    description = proc { 'email has not been enqueued' }

    RSpec::Matchers::AliasedNegatedMatcher.new(matcher, description)
  end

  def have_only_enqueued_mail_with_args(mailer_class, mailer_method, *args)
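
The rebuilt helper still returns a matcher used through `expect { ... }.to`. A hypothetical spec usage, where the mailer method, `service`, and `member` are illustrative names rather than anything taken from this commit:

# Illustrative only: asserting that no email is enqueued with these arguments.
it 'does not enqueue an access-granted email' do
  expect { service.execute }
    .to not_enqueue_mail_with(Notify, :member_access_granted_email, member.id)
end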

@@ -18,7 +18,6 @@ module Ci
      each_partitionable_table do |table_name|
        create_test_partition("p_#{table_name}", connection: connection)
      end
      ensure_builds_id_uniquness(connection: connection)
    end

    def teardown(connection: Ci::ApplicationRecord.connection)

@@ -60,16 +59,6 @@ module Ci
      SQL
    end

    # This can be removed after https://gitlab.com/gitlab-org/gitlab/-/issues/421173
    # is implemented
    def ensure_builds_id_uniquness(connection:)
      connection.execute(<<~SQL.squish)
        CREATE TRIGGER assign_p_ci_builds_id_trigger
          BEFORE INSERT ON #{full_partition_name('ci_builds')}
          FOR EACH ROW EXECUTE FUNCTION assign_p_ci_builds_id_value();
      SQL
    end

    def table_available?(table_name, connection:)
      connection.table_exists?(table_name) &&
        connection.column_exists?(table_name, :partition_id)

@@ -1,3 +0,0 @@
# frozen_string_literal: true

RSpec::Matchers.define_negated_matcher :not_enqueue_mail, :have_enqueued_mail

@@ -120,10 +120,8 @@ RSpec.shared_examples 'every metric definition' do
  end

  let(:ignored_classes) do
    [
      Gitlab::Usage::Metrics::Instrumentations::IssuesWithAlertManagementAlertsMetric,
      Gitlab::Usage::Metrics::Instrumentations::IssuesWithPrometheusAlertEvents
    ].freeze
    Gitlab::Usage::Metrics::Instrumentations::IssuesCreatedFromAlertsMetric::ISSUES_FROM_ALERTS_METRICS +
      Gitlab::Usage::Metrics::Instrumentations::UniqueUsersAllImportsMetric::IMPORTS_METRICS
  end

  let(:removed_classes) do

@@ -31,11 +31,13 @@ RSpec.shared_examples 'a correct instrumented metric query' do |params|
  end

  before do
    allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false)
    if metric.respond_to?(:relation, true) && metric.send(:relation).respond_to?(:connection)
      allow(metric.send(:relation).connection).to receive(:transaction_open?).and_return(false)
    end
  end

  it 'has correct generate query' do
    expect(metric.to_sql).to eq(expected_query)
    expect(metric.instrumentation).to eq(expected_query)
  end
end

@@ -45,6 +45,8 @@ RSpec.shared_examples 'updates namespace statistics' do

  context 'when deleting' do
    it 'schedules a statistic refresh' do
      statistic_source.save!

      expect(Groups::UpdateStatisticsWorker)
        .to receive(:perform_async)

@@ -9,9 +9,11 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca
  self.use_transactional_tests = false

  let(:migrations_base_dir) { 'click_house/migrations' }
  let(:migrations_dirname) { '' }
  let(:migrations_dirname) { 'undefined' }
  let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
  let(:verbose) { nil }
  let(:target_version) { nil }
  let(:step) { nil }

  before(:all) do
    Rake.application.rake_require 'tasks/gitlab/click_house/migration'

@@ -19,13 +21,13 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca

  before do
    stub_env('VERBOSE', verbose) if verbose
    stub_env('VERSION', target_version) if target_version
    stub_env('STEP', step.to_s) if step
  end

  describe 'migrate' do
    subject(:migration) { run_rake_task('gitlab:clickhouse:migrate') }

    let(:target_version) { nil }

    around do |example|
      ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]

@@ -34,10 +36,6 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca
      clear_consts(expand_fixture_path(migrations_base_dir))
    end

    before do
      stub_env('VERSION', target_version) if target_version
    end

    describe 'when creating a table' do
      let(:migrations_dirname) { 'plain_table_creation' }

@@ -67,21 +65,68 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca

    describe 'when dropping a table' do
      let(:migrations_dirname) { 'drop_table' }
      let(:target_version) { 2 }

      it 'drops table' do
        stub_env('VERSION', 1)
        run_rake_task('gitlab:clickhouse:migrate')
      context 'with VERSION set' do
        let(:target_version) { 2 }

        expect(table_names).to include('some')
        it 'drops table' do
          stub_env('VERSION', 1)
          run_rake_task('gitlab:clickhouse:migrate')

          stub_env('VERSION', target_version)
          migration
          expect(table_names).not_to include('some')
          expect(table_names).to include('some')

          stub_env('VERSION', target_version)
          migration
          expect(table_names).not_to include('some')
        end

        context 'with STEP also set' do
          let(:step) { 1 }

          it 'ignores STEP and executes both migrations' do
            migration

            expect(table_names).not_to include('some')
          end
        end
      end

      context 'with STEP set to 1' do
        let(:step) { 1 }

        it 'executes only first step and creates table' do
          migration

          expect(table_names).to include('some')
        end
      end

      context 'with STEP set to 0' do
        let(:step) { 0 }

        it 'executes only first step and creates table' do
          expect { migration }.to raise_error ArgumentError, 'STEP should be a positive number'
        end
      end

      context 'with STEP set to not-a-number' do
        let(:step) { 'NaN' }

        it 'raises an error' do
          expect { migration }.to raise_error ArgumentError, 'invalid value for Integer(): "NaN"'
        end
      end

      context 'with STEP set to empty string' do
        let(:step) { '' }

        it 'raises an error' do
          expect { migration }.to raise_error ArgumentError, 'invalid value for Integer(): ""'
        end
      end
    end

    describe 'with VERSION is invalid' do
    context 'with VERSION is invalid' do
      let(:migrations_dirname) { 'plain_table_creation' }
      let(:target_version) { 'invalid' }
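
The STEP expectations above pin down how the rake task is expected to validate the environment variable. A minimal sketch of validation logic that would produce exactly these errors, written as an assumption for illustration rather than the task's actual implementation:

# Hypothetical STEP validation. Integer() raises
# ArgumentError: invalid value for Integer(): "NaN" / "" on non-numeric input.
def read_step_from_env
  raw = ENV['STEP']
  return nil if raw.nil?

  step = Integer(raw)
  raise ArgumentError, 'STEP should be a positive number' unless step > 0

  step
end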

@@ -92,29 +137,51 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca
  describe 'rollback' do
    subject(:migration) { run_rake_task('gitlab:clickhouse:rollback') }

    let(:schema_migration) { ClickHouse::MigrationSupport::SchemaMigration }
    let(:migrations_dirname) { 'table_creation_with_down_method' }

    around do |example|
      ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
      migrate(nil, ClickHouse::MigrationSupport::MigrationContext.new(migrations_dir, schema_migration))
      # Ensure we start with all migrations up
      schema_migration = ClickHouse::MigrationSupport::SchemaMigration
      migrate(ClickHouse::MigrationSupport::MigrationContext.new(migrations_dir, schema_migration), nil)

      example.run

      clear_consts(expand_fixture_path(migrations_base_dir))
    end

    context 'when migrating back all the way to 0' do
      let(:target_version) { 0 }
    context 'with VERSION set' do
      context 'when migrating back all the way to 0' do
        let(:target_version) { 0 }

        context 'when down method is present' do
          let(:migrations_dirname) { 'table_creation_with_down_method' }

          it 'removes migration' do
            expect(table_names).to include('some')
          it 'rolls back all migrations' do
            expect(table_names).to include('some', 'another')

            migration
            expect(table_names).not_to include('some')
            expect(table_names).not_to include('some', 'another')
          end

          context 'with STEP also set' do
            let(:step) { 1 }

            it 'ignores STEP and rolls back all migrations' do
              expect(table_names).to include('some', 'another')

              migration
              expect(table_names).not_to include('some', 'another')
            end
          end
        end
      end

      context 'with STEP set to 1' do
        let(:step) { 1 }

        it 'executes only first step and drops "another" table' do
          run_rake_task('gitlab:clickhouse:rollback')

          expect(table_names).to include('some')
          expect(table_names).not_to include('another')
        end
      end
    end

@@ -89,6 +89,21 @@ RSpec.describe 'gitlab:ldap:secret rake tasks' do
    stub_env('EDITOR', nil)
    expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/No \$EDITOR specified to open file. Please provide one when running the command/).to_stderr
  end

  it 'when $EDITOR contains multiple arguments' do
    stub_env('EDITOR', 'cat -v')

    expect { run_rake_task('gitlab:ldap:secret:edit') }.to output(/File encrypted and saved./).to_stdout
    expect(File.exist?(ldap_secret_file)).to be true
    value = Settings.encrypted(ldap_secret_file)
    expect(value.read).to match(/password: '123'/)
  end

  it 'when $EDITOR is set to a non-existent binary' do
    stub_env('EDITOR', "nothing-#{SecureRandom.hex}")

    expect { run_rake_task('gitlab:ldap:secret:edit') }.to raise_error(/Unable to run \$EDITOR/)
  end
end

describe 'write' do