Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-02-23 18:09:16 +00:00
parent b465b2440b
commit f6b95a66bc
54 changed files with 1040 additions and 530 deletions

View File

@ -2,7 +2,7 @@
import { GlSprintf, GlLink, GlModalDirective } from '@gitlab/ui';
import { createAlert, VARIANT_SUCCESS } from '~/flash';
import { redirectTo, setUrlParams } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
import { s__ } from '~/locale';
import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
import RunnerPlatformsRadioGroup from '~/ci/runner/components/runner_platforms_radio_group.vue';
import RunnerCreateForm from '~/ci/runner/components/runner_create_form.vue';
@ -38,7 +38,10 @@ export default {
{ [PARAM_KEY_PLATFORM]: this.platform },
runner.registerAdminUrl,
);
saveAlertToLocalStorage({ message: __('Runner created.'), variant: VARIANT_SUCCESS });
saveAlertToLocalStorage({
message: s__('Runners|Runner created.'),
variant: VARIANT_SUCCESS,
});
redirectTo(registerUrl);
},
onError(error) {

View File

@ -33,6 +33,8 @@ export default {
branchesPath: {
default: '',
},
showStatusChecks: { default: false },
showApprovers: { default: false },
},
apollo: {
project: {
@ -180,7 +182,7 @@ export default {
<!-- EE start -->
<!-- Approvals -->
<template v-if="approvalsHeader">
<template v-if="showApprovers">
<h4 class="gl-mb-1 gl-mt-5">{{ $options.i18n.approvalsTitle }}</h4>
<gl-sprintf :message="$options.i18n.approvalsDescription">
<template #link="{ content }">
@ -200,7 +202,7 @@ export default {
</template>
<!-- Status checks -->
<template v-if="statusChecksHeader">
<template v-if="showStatusChecks">
<h4 class="gl-mb-1 gl-mt-5">{{ $options.i18n.statusChecksTitle }}</h4>
<gl-sprintf :message="$options.i18n.statusChecksDescription">
<template #link="{ content }">

View File

@ -1,6 +1,7 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import { parseBoolean } from '~/lib/utils/common_utils';
import View from 'ee_else_ce/projects/settings/branch_rules/components/view/index.vue';
export default function mountBranchRules(el) {
@ -20,6 +21,8 @@ export default function mountBranchRules(el) {
approvalRulesPath,
statusChecksPath,
branchesPath,
showStatusChecks,
showApprovers,
} = el.dataset;
return new Vue({
@ -31,6 +34,8 @@ export default function mountBranchRules(el) {
approvalRulesPath,
statusChecksPath,
branchesPath,
showStatusChecks: parseBoolean(showStatusChecks),
showApprovers: parseBoolean(showApprovers),
},
render(h) {
return h(View);

View File

@ -19,7 +19,6 @@ module CycleAnalyticsParams
@options ||= {}.tap do |opts|
opts[:current_user] = current_user
opts[:projects] = params[:project_ids] if params[:project_ids]
opts[:group] = params[:group_id] if params[:group_id]
opts[:from] = params[:from] || start_date(params)
opts[:to] = params[:to] if params[:to]
opts[:end_event_filter] = params[:end_event_filter] if params[:end_event_filter]
@ -78,5 +77,3 @@ module CycleAnalyticsParams
end
end
end
CycleAnalyticsParams.prepend_mod_with('CycleAnalyticsParams')

View File

@ -90,6 +90,7 @@ module ProductAnalyticsTracking
return true if MIGRATED_EVENTS.include?(event)
events_to_ff = {
g_edit_by_sfe: :_phase4,
g_compliance_dashboard: :_phase4
}

View File

@ -20,6 +20,11 @@ class Projects::Analytics::CycleAnalytics::StagesController < Projects::Applicat
@project.project_namespace
end
override :all_cycle_analytics_params
def all_cycle_analytics_params
super.merge({ namespace: @project.project_namespace })
end
override :cycle_analytics_configuration
def cycle_analytics_configuration(stages)
super(stages.select { |stage| permitted_stage?(stage) })

View File

@ -1,6 +1,7 @@
# frozen_string_literal: true
class Projects::Analytics::CycleAnalytics::SummaryController < Projects::ApplicationController
extend ::Gitlab::Utils::Override
include CycleAnalyticsParams
respond_to :json
@ -17,6 +18,11 @@ class Projects::Analytics::CycleAnalytics::SummaryController < Projects::Applica
private
override :all_cycle_analytics_params
def all_cycle_analytics_params
super.merge({ namespace: @project.project_namespace })
end
def project_level
@project_level ||= Analytics::CycleAnalytics::ProjectLevel.new(project: @project, options: options(allowed_params))
end

View File

@ -10,7 +10,7 @@ class Projects::BlobController < Projects::ApplicationController
include RedirectsForMissingPathOnTree
include SourcegraphDecorator
include DiffHelper
include RedisTracking
include ProductAnalyticsTracking
extend ::Gitlab::Utils::Override
prepend_before_action :authenticate_user!, only: [:edit]
@ -37,7 +37,11 @@ class Projects::BlobController < Projects::ApplicationController
before_action :validate_diff_params, only: :diff
before_action :set_last_commit_sha, only: [:edit, :update]
track_redis_hll_event :create, :update, name: 'g_edit_by_sfe'
track_custom_event :create, :update,
name: 'g_edit_by_sfe',
action: 'perform_sfe_action',
label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_sfe_edit',
destinations: [:redis_hll, :snowplow]
feature_category :source_code_management
urgency :low, [:create, :show, :edit, :update, :diff]
@ -316,6 +320,12 @@ class Projects::BlobController < Projects::ApplicationController
file = file.cdn_enabled_url(request.remote_ip) if file.respond_to?(:cdn_enabled_url)
file.url
end
alias_method :tracking_project_source, :project
def tracking_namespace_source
project&.namespace
end
end
Projects::BlobController.prepend_mod
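
For context on the change above: `track_custom_event` comes from the `ProductAnalyticsTracking` concern and routes the event to both RedisHLL and Snowplow. A minimal sketch of the same pattern in another controller follows; the controller, action, and event names are illustrative and not part of this commit:

```ruby
# Hypothetical controller using the same tracking pattern as Projects::BlobController.
class Projects::ExampleController < Projects::ApplicationController
  include ProductAnalyticsTracking

  # Count unique users in RedisHLL and emit a Snowplow event when :show runs.
  track_custom_event :show,
    name: 'g_example_event',
    action: 'perform_example_action',
    label: 'usage_activity_by_stage_monthly.create.example_metric',
    destinations: [:redis_hll, :snowplow]

  def show
    head :ok
  end

  private

  # The concern calls these to attribute the event to a namespace and project.
  def tracking_namespace_source
    project&.namespace
  end

  def tracking_project_source
    project
  end
end
```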

View File

@ -44,7 +44,7 @@ class Projects::CycleAnalyticsController < Projects::ApplicationController
override :all_cycle_analytics_params
def all_cycle_analytics_params
super.merge({ project: @project, value_stream: @value_stream })
super.merge({ namespace: @project.project_namespace, value_stream: @value_stream })
end
def load_value_stream

View File

@ -1,29 +0,0 @@
# frozen_string_literal: true
module Analytics
module CycleAnalyticsHelper
def cycle_analytics_default_stage_config
Gitlab::Analytics::CycleAnalytics::DefaultStages.all.map do |stage_params|
Analytics::CycleAnalytics::StagePresenter.new(stage_params)
end
end
def cycle_analytics_initial_data(project, group = nil)
base_data = { project_id: project.id, group_path: project.group&.path, request_path: project_cycle_analytics_path(project), full_path: project.full_path }
svgs = { empty_state_svg_path: image_path("illustrations/analytics/cycle-analytics-empty-chart.svg"), no_data_svg_path: image_path("illustrations/analytics/cycle-analytics-empty-chart.svg"), no_access_svg_path: image_path("illustrations/analytics/no-access.svg") }
api_paths = group.present? ? cycle_analytics_group_api_paths(group) : cycle_analytics_project_api_paths(project)
base_data.merge(svgs, api_paths)
end
private
def cycle_analytics_group_api_paths(group)
{ milestones_path: group_milestones_path(group, format: :json), labels_path: group_labels_path(group, format: :json), group_path: group_path(group), group_id: group&.id }
end
def cycle_analytics_project_api_paths(project)
{ milestones_path: project_milestones_path(project, format: :json), labels_path: project_labels_path(project, format: :json), group_path: project.parent&.path, group_id: project.parent&.id }
end
end
end

View File

@ -757,7 +757,7 @@ module ProjectsHelper
end
def show_visibility_confirm_modal?(project)
project.unlink_forks_upon_visibility_decrease_enabled? && project.visibility_level > Gitlab::VisibilityLevel::PRIVATE && project.forks_count > 0
project.visibility_level > Gitlab::VisibilityLevel::PRIVATE && project.forks_count > 0
end
def confirm_reduce_visibility_message(project)

View File

@ -450,15 +450,13 @@ module Ci
values[:executor_type] = EXECUTOR_NAME_TO_TYPES.fetch(values.delete(:executor), :unknown)
end
new_version = values[:version]
schedule_runner_version_update(new_version) if new_version && values[:version] != version
cache_attributes(values)
# We save data without validation, it will always change due to `contacted_at`
if persist_cached_data?
version_updated = values.include?(:version) && values[:version] != version
update_columns(values)
schedule_runner_version_update if version_updated
end
update_columns(values) if persist_cached_data?
end
end
@ -603,10 +601,10 @@ module Ci
# TODO Remove in 16.0 when runners are known to send a system_id
# For now, heartbeats with version updates might result in two Sidekiq jobs being queued if a runner has a system_id
# This is not a problem since the jobs are deduplicated on the version
def schedule_runner_version_update
return unless version
def schedule_runner_version_update(new_version)
return unless new_version
Ci::Runners::ProcessRunnerVersionUpdateWorker.perform_async(version)
Ci::Runners::ProcessRunnerVersionUpdateWorker.perform_async(new_version)
end
end
end

View File

@ -57,12 +57,11 @@ module Ci
values[:executor_type] = Ci::Runner::EXECUTOR_NAME_TO_TYPES.fetch(values.delete(:executor), :unknown)
end
version_changed = values.include?(:version) && values[:version] != version
new_version = values[:version]
schedule_runner_version_update(new_version) if new_version && values[:version] != version
cache_attributes(values)
schedule_runner_version_update if version_changed
# We save data without validation, it will always change due to `contacted_at`
update_columns(values) if persist_cached_data?
end
@ -79,10 +78,10 @@ module Ci
(Time.current - real_contacted_at) >= contacted_at_max_age
end
def schedule_runner_version_update
return unless version
def schedule_runner_version_update(new_version)
return unless new_version
Ci::Runners::ProcessRunnerVersionUpdateWorker.perform_async(version)
Ci::Runners::ProcessRunnerVersionUpdateWorker.perform_async(new_version)
end
end
end

View File

@ -128,7 +128,6 @@ class Project < ApplicationRecord
after_create -> { create_or_load_association(:pages_metadatum) }
after_create :set_timestamps_for_create
after_create :check_repository_absence!
after_update :update_forks_visibility_level
before_destroy :remove_private_deploy_keys
after_destroy :remove_exports
after_save :update_project_statistics, if: :saved_change_to_namespace_id?
@ -1154,10 +1153,6 @@ class Project < ApplicationRecord
{ scope: :project, status: auto_devops&.enabled || Feature.enabled?(:force_autodevops_on_by_default, self) }
end
def unlink_forks_upon_visibility_decrease_enabled?
Feature.enabled?(:unlink_fork_network_upon_visibility_decrease, self)
end
# LFS and hashed repository storage are required for using Design Management.
def design_management_enabled?
lfs_enabled? && hashed_storage?(:repository)
@ -1940,19 +1935,6 @@ class Project < ApplicationRecord
create_repository(force: true) unless repository_exists?
end
# update visibility_level of forks
def update_forks_visibility_level
return if unlink_forks_upon_visibility_decrease_enabled?
return unless visibility_level < visibility_level_before_last_save
forks.each do |forked_project|
if forked_project.visibility_level > visibility_level
forked_project.visibility_level = visibility_level
forked_project.save!
end
end
end
def allowed_to_share_with_group?
!namespace.share_with_group_lock
end

View File

@ -1,6 +1,5 @@
- page_title _("Value Stream Analytics")
- data_attributes = @request_params.valid? ? @request_params.to_data_attributes : {}
- data_attributes.merge!(cycle_analytics_initial_data(@project, @group))
- add_page_specific_style 'page_bundles/cycle_analytics'
#js-cycle-analytics{ data: data_attributes }

View File

@ -1,6 +1,8 @@
- add_to_breadcrumbs _('Repository Settings'), project_settings_repository_path(@project)
- page_title s_('BranchRules|Branch rules details')
- show_status_checks = @project.licensed_feature_available?(:external_status_checks)
- show_approvers = @project.licensed_feature_available?(:merge_request_approvers)
%h3.gl-mb-5= s_('BranchRules|Branch rules details')
#js-branch-rules{ data: { project_path: @project.full_path, protected_branches_path: project_settings_repository_path(@project, anchor: 'js-protected-branches-settings'), approval_rules_path: project_settings_merge_requests_path(@project, anchor: 'js-merge-request-approval-settings'), status_checks_path: project_settings_merge_requests_path(@project, anchor: 'js-merge-request-settings'), branches_path: project_branches_path(@project) } }
#js-branch-rules{ data: { project_path: @project.full_path, protected_branches_path: project_settings_repository_path(@project, anchor: 'js-protected-branches-settings'), approval_rules_path: project_settings_merge_requests_path(@project, anchor: 'js-merge-request-approval-settings'), status_checks_path: project_settings_merge_requests_path(@project, anchor: 'js-merge-request-settings'), branches_path: project_branches_path(@project), show_status_checks: show_status_checks.to_s, show_approvers: show_approvers.to_s } }

View File

@ -31,14 +31,13 @@
%ul
- if label.project_label? && label.project.group && can?(current_user, :admin_label, label.project.group)
%li
= render Pajamas::ButtonComponent.new(category: :tertiary,
= render Pajamas::ButtonComponent.new(category: :tertiary, variant: :link,
button_options: { class: 'js-promote-project-label-button', data: { url: promote_project_label_path(label.project, label), label_title: label.title, label_color: label.color, label_text_color: label.text_color, group_name: label.project.group.name } }) do
= _('Promote to group label')
%li
%span
= render Pajamas::ButtonComponent.new(category: :tertiary,
button_options: { class: 'text-danger js-delete-label-modal-button', data: { label_name: label.name, subject_name: label.subject_name, destroy_path: label.destroy_path } }) do
= _('Delete')
= render Pajamas::ButtonComponent.new(category: :tertiary, variant: :link,
button_options: { class: 'text-danger js-delete-label-modal-button', data: { label_name: label.name, subject_name: label.subject_name, destroy_path: label.destroy_path } }) do
= _('Delete')
- if current_user
%li.gl-display-inline-block.label-subscription.js-label-subscription.gl-ml-3
- if label.can_subscribe_to_label_in_different_levels?

View File

@ -1,8 +0,0 @@
---
name: unlink_fork_network_upon_visibility_decrease
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/20466
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/369438
milestone: '12.6'
type: development
group: group::source code
default_enabled: true

View File

@ -5,6 +5,7 @@ require 'gitlab/redis'
Redis.raise_deprecations = true unless Rails.env.production?
Redis::Client.prepend(Gitlab::Instrumentation::RedisInterceptor)
Redis::Cluster::NodeLoader.prepend(Gitlab::Patch::NodeLoader)
# Make sure we initialize a Redis connection pool before multi-threaded
# execution starts by

View File

@ -19,7 +19,7 @@ end-to-end duration of a pipeline.
On GitLab.com:
- CI/CD minutes quotas are enabled for both public and private projects, but public
- CI/CD minutes quotas are enabled for all projects, but certain
projects [consume CI/CD minutes at a slower rate](#cost-factor).
- The base monthly CI/CD minutes quota for a GitLab.com [namespace](../../user/namespace/index.md)
is determined by its [license tier](https://about.gitlab.com/pricing/).
@ -201,13 +201,12 @@ can be higher than the end-to-end duration of a pipeline.
The cost factors for jobs running on shared runners on GitLab.com are:
- `1` for internal and private projects.
- `0.5` for public projects in the [GitLab for Open Source program](../../subscriptions/index.md#gitlab-for-open-source).
- `0.008` for public forks of public projects in the [GitLab for Open Source program](../../subscriptions/index.md#gitlab-for-open-source). For every 125 minutes of job execution time,
- `1` for internal, public, and private projects.
- Exceptions for public projects:
- `0.5` for projects in the [GitLab for Open Source program](../../subscriptions/index.md#gitlab-for-open-source).
- `0.008` for forks of projects in the [GitLab for Open Source program](../../subscriptions/index.md#gitlab-for-open-source). For every 125 minutes of job execution time,
you use 1 CI/CD minute.
- `1` for other public projects, after October 1, 2022 (previously `0.04`).
For every 1 minute of job execution time, you use 1 CI/CD minute.
- Calculated differently for [community contributions to GitLab projects](#cost-factor-for-community-contributions-to-gitlab-projects).
- Discounted dynamically for [community contributions to GitLab projects](#cost-factor-for-community-contributions-to-gitlab-projects).
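
To make these GitLab.com cost factors concrete, here is a small worked example in plain Ruby (the 125-minute figure comes from the fork cost factor above):

```ruby
# CI/CD minutes consumed = job execution minutes * cost factor
def ci_cd_minutes(job_minutes, cost_factor)
  job_minutes * cost_factor
end

ci_cd_minutes(125, 1)     # => 125   internal, public, and private projects
ci_cd_minutes(125, 0.5)   # => 62.5  Open Source program projects
ci_cd_minutes(125, 0.008) # => 1.0   forks of Open Source program projects
```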
The cost factors on self-managed instances are:

View File

@ -34,10 +34,13 @@ Background migrations can help when:
- Populating one column based on JSON stored in another column.
- Migrating data that depends on the output of external services. (For example, an API.)
NOTE:
If the batched background migration is part of an important upgrade, it must be announced
in the release post. Discuss with your Project Manager if you're unsure if the migration falls
into this category.
### Notes
- If the batched background migration is part of an important upgrade, it must be announced
in the release post. Discuss with your Project Manager if you're unsure if the migration falls
into this category.
- You should use the [generator](#generator) to create batched background migrations,
so that required files are created by default.
## Isolation
@ -311,6 +314,22 @@ NOTE:
When applying additional filters, it is important to ensure they are properly covered by an index to optimize `EachBatch` performance.
In the example above we need an index on `(type, id)` to support the filters. See [the `EachBatch` documentation for more information](iterating_tables_in_batches.md).
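
For illustration only (not part of this commit), a supporting index such as the `(type, id)` one mentioned above could be added in a regular migration using GitLab's concurrent index helpers; the class and index names here are hypothetical:

```ruby
# frozen_string_literal: true

# Hypothetical migration adding the (type, id) index that the filtered
# EachBatch example above relies on.
class AddTypeAndIdIndexToRoutes < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  INDEX_NAME = 'index_routes_on_type_and_id'

  def up
    add_concurrent_index :routes, [:type, :id], name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :routes, INDEX_NAME
  end
end
```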
## Generator
The custom generator `batched_background_migration` scaffolds necessary files and
accepts `table_name`, `column_name`, and `feature_category` as arguments. Usage:
```shell
bundle exec rails g batched_background_migration my_batched_migration --table_name=<table-name> --column_name=<column-name> --feature_category=<feature-category>
```
This command creates these files:
- `db/post_migrate/20230214231008_queue_my_batched_migration.rb`
- `spec/migrations/20230214231008_queue_my_batched_migration_spec.rb`
- `lib/gitlab/background_migration/my_batched_migration.rb`
- `spec/lib/gitlab/background_migration/my_batched_migration_spec.rb`
## Example
The `routes` table has a `source_type` field that's used for a polymorphic relationship.
@ -319,8 +338,13 @@ the work is migrating data from the `source_id` column into a new singular forei
Because we intend to delete old rows later, there's no need to update them as part of the
background migration.
1. Start by defining our migration class, which should inherit
from `Gitlab::BackgroundMigration::BatchedMigrationJob`:
1. Start by using the generator to create batched background migration files:
```shell
bundle exec rails g batched_background_migration BackfillRouteNamespaceId --table_name=routes --column_name=id --feature_category=source_code_management
```
1. Update the migration job (subclass of `BatchedMigrationJob`) to copy `source_id` values to `namespace_id`:
```ruby
class Gitlab::BackgroundMigration::BackfillRouteNamespaceId < BatchedMigrationJob
@ -344,10 +368,10 @@ background migration.
```
NOTE:
Job classes must be subclasses of `BatchedMigrationJob` to be
Job classes inherit from `BatchedMigrationJob` to ensure they are
correctly handled by the batched migration framework. Any subclass of
`BatchedMigrationJob` is initialized with necessary arguments to
execute the batch, as well as a connection to the tracking database.
`BatchedMigrationJob` is initialized with the necessary arguments to
execute the batch, and a connection to the tracking database.
1. Create a database migration that adds a new trigger to the database. Example:
@ -380,12 +404,14 @@ background migration.
end
```
1. Create a post-deployment migration that queues the migration for existing data:
1. Update the created post-deployment migration with required delay and batch sizes:
```ruby
class QueueBackfillRoutesNamespaceId < Gitlab::Database::Migration[2.1]
MIGRATION = 'BackfillRouteNamespaceId'
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
restrict_gitlab_migration gitlab_schema: :gitlab_main
@ -394,7 +420,9 @@ background migration.
MIGRATION,
:routes,
:id,
job_interval: DELAY_INTERVAL
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end

View File

@ -26,6 +26,7 @@ feature_categories:
description: Represents a Terraform state backend
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26619
milestone: '13.0'
gitlab_schema: gitlab_main
```
## Adding tables

View File

@ -90,8 +90,8 @@ To restrict group access by IP address:
Keep in mind that restricting group access by IP address has the following implications:
- Administrators and group owners can access group settings from any IP address, regardless of IP restriction. However:
- Group owners can access the subgroups, but not the projects belonging to the group or subgroups, when accessing from a disallowed IP address.
- Administrators and group Owners can access group settings from any IP address, regardless of IP restriction. However:
- Group Owners can access the subgroups, but not the projects belonging to the group or subgroups, when accessing from a disallowed IP address.
- Administrators can access projects belonging to the group when accessing from a disallowed IP address.
Access to projects includes cloning code from them.
- Users can still see group and project names and hierarchies. Only the following are restricted:
@ -181,12 +181,12 @@ prevent a project from being shared with other groups:
1. Select **Projects in `<group_name>` cannot be shared with other groups**.
1. Select **Save changes**.
This setting applies to all subgroups unless overridden by a group owner. Groups already
This setting applies to all subgroups unless overridden by a group Owner. Groups already
added to a project lose access when the setting is enabled.
## Prevent users from requesting access to a group
As a group owner, you can prevent non-members from requesting access to
As a group Owner, you can prevent non-members from requesting access to
your group.
1. On the top bar, **Main menu > Groups** and find your group.
@ -221,13 +221,13 @@ Existing forks are not removed.
## Prevent members from being added to projects in a group **(PREMIUM)**
As a group owner, you can prevent any new project membership for all
As a group Owner, you can prevent any new project membership for all
projects in a group, allowing tighter control over project membership.
For example, if you want to lock the group for an [Audit Event](../../administration/audit_events.md),
you can guarantee that project membership cannot be modified during the audit.
If group membership lock is enabled, the group owner can still:
If group membership lock is enabled, the group Owner can still:
- Invite groups or add members to groups to give them access to projects in the **locked** group.
- Change the role of group members.

View File

@ -394,11 +394,11 @@ You can also [drag issues](#move-issues-and-lists) to change their position and
![Drag issues between swimlanes](img/epics_swimlanes_drag_and_drop.png)
## Work In Progress limits **(PREMIUM)**
## Work in progress limits **(PREMIUM)**
> Moved to GitLab Premium in 13.9.
You can set a Work In Progress (WIP) limit for each issue list on an issue board. When a limit is
You can set a work in progress (WIP) limit for each issue list on an issue board. When a limit is
set, the list's header shows the number of issues in the list and the soft limit of issues.
You cannot set a WIP limit on the default lists (**Open** and **Closed**).
@ -413,11 +413,11 @@ Prerequisites:
- You must have at least the Reporter role for the project.
To set a WIP limit for a list:
To set a WIP limit for a list, in an issue board:
1. Navigate to a Project or Group board of which you're a member.
1. Select the settings icon in a list's header.
1. Next to **Work In Progress Limit**, select **Edit**.
1. On the top of the list you want to edit, select **List actions** (**{ellipsis_v}**) **> Edit list settings**.
The list settings sidebar opens on the right.
1. Next to **Work in progress Limit**, select **Edit**.
1. Enter the maximum number of issues.
1. Press <kbd>Enter</kbd> to save.
@ -493,10 +493,10 @@ Prerequisites:
To remove a list from an issue board:
1. On the top of the list you want to remove, select the **List settings** icon (**{settings}**).
1. On the top of the list you want to remove, select **List actions** (**{ellipsis_v}**).
The list settings sidebar opens on the right.
1. Select **Remove list**. A confirmation dialog appears.
1. Select **OK**.
1. Select **Remove list** again.
### Add issues to a list

View File

@ -38,13 +38,12 @@ module Gitlab
attribute :created_after, :datetime
attribute :created_before, :datetime
attribute :group
attribute :namespace
attribute :current_user
attribute :value_stream
attribute :sort
attribute :direction
attribute :page
attribute :project
attribute :stage_id
attribute :end_event_filter
@ -66,10 +65,6 @@ module Gitlab
self.end_event_filter ||= Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder::DEFAULT_END_EVENT_FILTER
end
def project_ids
Array(@project_ids)
end
def to_data_collector_params
{
current_user: current_user,
@ -86,12 +81,9 @@ module Gitlab
def to_data_attributes
{}.tap do |attrs|
attrs[:aggregation] = aggregation_attributes if group
attrs[:group] = group_data_attributes if group
attrs[:value_stream] = value_stream_data_attributes.to_json if value_stream
attrs[:created_after] = created_after.to_date.iso8601
attrs[:created_before] = created_before.to_date.iso8601
attrs[:projects] = group_projects(project_ids) if group && project_ids.present?
attrs[:labels] = label_name.to_json if label_name.present?
attrs[:assignees] = assignee_username.to_json if assignee_username.present?
attrs[:author] = author_username if author_username.present?
@ -99,35 +91,63 @@ module Gitlab
attrs[:sort] = sort if sort.present?
attrs[:direction] = direction if direction.present?
attrs[:stage] = stage_data_attributes.to_json if stage_id.present?
attrs[:namespace] = namespace_attributes
attrs[:enable_tasks_by_type_chart] = false
attrs[:default_stages] = Gitlab::Analytics::CycleAnalytics::DefaultStages.all.map do |stage_params|
::Analytics::CycleAnalytics::StagePresenter.new(stage_params)
end.to_json
attrs.merge!(foss_project_level_params, resource_paths)
end
end
def project_ids
Array(@project_ids)
end
private
def use_aggregated_backend?
# for now it's only available on the group-level
group.present?
end
delegate :url_helpers, to: Gitlab::Routing
def foss_project_level_params
return {} unless project
def aggregation_attributes
{
enabled: aggregation.enabled.to_s,
last_run_at: aggregation.last_incremental_run_at&.iso8601,
next_run_at: aggregation.estimated_next_run_at&.iso8601
project_id: project.id,
group_path: project.group&.path,
request_path: url_helpers.project_cycle_analytics_path(project),
full_path: project.full_path
}
end
def aggregation
@aggregation ||= ::Analytics::CycleAnalytics::Aggregation.safe_create_for_namespace(group)
def resource_paths
helpers = ActionController::Base.helpers
{}.tap do |paths|
paths[:empty_state_svg_path] = helpers.image_path("illustrations/analytics/cycle-analytics-empty-chart.svg")
paths[:no_data_svg_path] = helpers.image_path("illustrations/analytics/cycle-analytics-empty-chart.svg")
paths[:no_access_svg_path] = helpers.image_path("illustrations/analytics/no-access.svg")
if project
paths[:milestones_path] = url_helpers.project_milestones_path(project, format: :json)
paths[:labels_path] = url_helpers.project_labels_path(project, format: :json)
end
end
end
def group_data_attributes
# FOSS version doesn't use the aggregated VSA backend
def use_aggregated_backend?
false
end
def namespace_attributes
container = project || namespace
return {} unless container
{
id: group.id,
namespace_id: group.id,
name: group.name,
full_path: group.full_path,
avatar_url: group.avatar_url
name: container.name,
full_path: container.full_path,
avatar_url: container.avatar_url
}
end
@ -139,28 +159,6 @@ module Gitlab
}
end
def group_projects(project_ids)
GroupProjectsFinder.new(
group: group,
current_user: current_user,
options: { include_subgroups: true },
project_ids_relation: project_ids
)
.execute
.with_route
.map { |project| project_data_attributes(project) }
.to_json
end
def project_data_attributes(project)
{
id: project.to_gid.to_s,
name: project.name,
path_with_namespace: project.path_with_namespace,
avatar_url: project.avatar_url
}
end
def stage_data_attributes
return unless stage
@ -196,10 +194,18 @@ module Gitlab
return unless value_stream
strong_memoize(:stage) do
::Analytics::CycleAnalytics::StageFinder.new(parent: project&.project_namespace || group, stage_id: stage_id).execute if stage_id
::Analytics::CycleAnalytics::StageFinder.new(parent: namespace, stage_id: stage_id).execute if stage_id
end
end
def project
strong_memoize(:project) do
namespace.project if namespace.is_a?(Namespaces::ProjectNamespace)
end
end
end
end
end
end
Gitlab::Analytics::CycleAnalytics::RequestParams.prepend_mod_with('Gitlab::Analytics::CycleAnalytics::RequestParams')
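
A usage sketch of the reworked request params, mirroring how the updated spec drives them; `project` and `user` are assumed to exist and the date range is illustrative:

```ruby
params = Gitlab::Analytics::CycleAnalytics::RequestParams.new(
  namespace: project.project_namespace, # project-level VSA now goes through the namespace
  current_user: user,
  created_after: 30.days.ago,
  created_before: Date.today
)

attrs = params.to_data_attributes
attrs[:namespace] # => { name: ..., full_path: ..., avatar_url: ... }
```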

View File

@ -121,6 +121,16 @@ module Gitlab
key_name = data['table_name'] || data['view_name']
# rubocop:disable Gitlab/DocUrl
if data['gitlab_schema'].nil?
raise(
UnknownSchemaError,
"#{file_path} must specify a valid gitlab_schema for #{key_name}." \
"See https://docs.gitlab.com/ee/development/database/database_dictionary.html"
)
end
# rubocop:enable Gitlab/DocUrl
dic[key_name] = data['gitlab_schema'].to_sym
end
end

View File

@ -10,6 +10,8 @@ module Gitlab
# See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
EXPECTED_TRIGGER_RECORD_COUNT = 3
# table_name can include schema name as a prefix. For example: 'gitlab_partitions_static.events_03',
# otherwise, it will default to the currently used schema, for example 'public'.
def initialize(table_name:, connection:, database_name:, with_retries: true, logger: nil, dry_run: false)
@table_name = table_name
@connection = connection

View File

@ -7,6 +7,8 @@ module Gitlab
belongs_to :postgres_partitioned_table, foreign_key: 'parent_identifier', primary_key: 'identifier'
# identifier includes the partition schema.
# For example 'gitlab_partitions_static.events_03', or 'gitlab_partitions_dynamic.logs_03'
scope :for_identifier, ->(identifier) do
unless identifier =~ Gitlab::Database::FULLY_QUALIFIED_IDENTIFIER
raise ArgumentError, "Partition name is not fully qualified with a schema: #{identifier}"
@ -19,8 +21,12 @@ module Gitlab
for_identifier(identifier).first!
end
scope :for_parent_table, ->(name) do
where("parent_identifier = concat(current_schema(), '.', ?)", name).order(:name)
scope :for_parent_table, ->(parent_table) do
if parent_table =~ Database::FULLY_QUALIFIED_IDENTIFIER
where(parent_identifier: parent_table).order(:name)
else
where("parent_identifier = concat(current_schema(), '.', ?)", parent_table).order(:name)
end
end
def self.partition_exists?(table_name)
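
Usage sketch for the updated scope (the table name is illustrative): an unqualified name still resolves against the current schema, while a schema-qualified name is matched as-is.

```ruby
# Resolved against the current schema (for example "public").
Gitlab::Database::PostgresPartition.for_parent_table('_test_partitioned_table')

# Matched directly against parent_identifier.
Gitlab::Database::PostgresPartition.for_parent_table('public._test_partitioned_table')
```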

View File

@ -16,11 +16,13 @@ module Gitlab
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/366834
next if schema_name.in? GITLAB_SCHEMAS_TO_IGNORE
lock_writes_manager(table_name, connection, database_name).unlock_writes
unlock_writes_on_table(table_name, connection, database_name)
end
end
end
# Locks writes on tables in the databases where they don't belong, and unlocks
# writes on tables in the databases where they do belong
def lock_writes
Gitlab::Database::EachDatabase.each_database_connection(include_shared: false) do |connection, database_name|
schemas_for_connection = Gitlab::Database.gitlab_schemas_for_connection(connection)
@ -30,9 +32,9 @@ module Gitlab
next if schema_name.in? GITLAB_SCHEMAS_TO_IGNORE
if schemas_for_connection.include?(schema_name)
lock_writes_manager(table_name, connection, database_name).unlock_writes
unlock_writes_on_table(table_name, connection, database_name)
else
lock_writes_manager(table_name, connection, database_name).lock_writes
lock_writes_on_table(table_name, connection, database_name)
end
end
end
@ -40,6 +42,24 @@ module Gitlab
private
# Unlocks the writes on the table and its partitions
def unlock_writes_on_table(table_name, connection, database_name)
lock_writes_manager(table_name, connection, database_name).unlock_writes
table_attached_partitions(table_name, connection) do |postgres_partition|
lock_writes_manager(postgres_partition.identifier, connection, database_name).unlock_writes
end
end
# It locks the writes on the table and its partitions
def lock_writes_on_table(table_name, connection, database_name)
lock_writes_manager(table_name, connection, database_name).lock_writes
table_attached_partitions(table_name, connection) do |postgres_partition|
lock_writes_manager(postgres_partition.identifier, connection, database_name).lock_writes
end
end
def tables_to_lock(connection, &block)
Gitlab::Database::GitlabSchema.tables_to_schema.each(&block)
@ -50,6 +70,14 @@ module Gitlab
end
end
def table_attached_partitions(table_name, connection, &block)
Gitlab::Database::SharedModel.using_connection(connection) do
break unless Gitlab::Database::PostgresPartitionedTable.find_by_name_in_current_schema(table_name)
Gitlab::Database::PostgresPartitionedTable.each_partition(table_name, &block)
end
end
def lock_writes_manager(table_name, connection, database_name)
Gitlab::Database::LockWritesManager.new(
table_name: table_name,

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
# Patch to address https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/2212#note_1287996694
# It uses hostname instead of IP address if the former is present in `CLUSTER NODES` output.
if Gem::Version.new(Redis::VERSION) > Gem::Version.new('4.8.1')
raise 'New version of redis detected, please remove or update this patch'
end
module Gitlab
module Patch
module NodeLoader
def self.prepended(base)
base.class_eval do
# monkey-patches https://github.com/redis/redis-rb/blob/v4.8.0/lib/redis/cluster/node_loader.rb#L23
def self.fetch_node_info(node)
node.call(%i[cluster nodes]).split("\n").map(&:split).to_h do |arr|
[
extract_host_identifier(arr[1]),
(arr[2].split(',') & %w[master slave]).first # rubocop:disable Naming/InclusiveLanguage
]
end
end
# Since `CLUSTER SLOTS` uses the preferred endpoint determined by
# the `cluster-preferred-endpoint-type` config value, we will prefer hostname over IP address.
# See https://redis.io/commands/cluster-nodes/ for details on the output format.
#
# @param [String] Address info matching the format: <ip:port@cport[,hostname[,auxiliary_field=value]*]>
def self.extract_host_identifier(node_address)
ip_chunk, hostname, _auxiliaries = node_address.split(',')
return ip_chunk.split('@').first if hostname.blank?
port = ip_chunk.split('@').first.split(':')[1]
"#{hostname}:#{port}"
end
end
end
end
end
end
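
A standalone re-implementation of the address parsing above, runnable outside the patch to show what `extract_host_identifier` returns; the sample addresses are made up:

```ruby
# Input format: <ip:port@cport[,hostname[,auxiliary_field=value]*]>
def extract_host_identifier(node_address)
  ip_chunk, hostname, _auxiliaries = node_address.split(',')
  return ip_chunk.split('@').first if hostname.nil? || hostname.empty?

  port = ip_chunk.split('@').first.split(':')[1]
  "#{hostname}:#{port}"
end

extract_host_identifier('10.0.0.1:6379@16379')
# => "10.0.0.1:6379"
extract_host_identifier('10.0.0.1:6379@16379,cache-01.example.com')
# => "cache-01.example.com:6379"
```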

View File

@ -86,7 +86,9 @@ module Sidebars
feature_is_enabled = enabled_for_user || enabled_for_group || enabled_for_project
user_has_permissions = can?(context.current_user, :admin_project_google_cloud, context.project)
unless feature_is_enabled && user_has_permissions
google_oauth2_configured = google_oauth2_configured?
unless feature_is_enabled && user_has_permissions && google_oauth2_configured
return ::Sidebars::NilMenuItem.new(item_id: :incubation_5mp_google_cloud)
end
@ -103,6 +105,11 @@ module Sidebars
item_id: :google_cloud
)
end
def google_oauth2_configured?
config = Gitlab::Auth::OAuth::Provider.config_for('google_oauth2')
config&.present? && config.app_id.present? && config.app_secret.present?
end
end
end
end

View File

@ -36913,9 +36913,6 @@ msgstr ""
msgid "Runner API"
msgstr ""
msgid "Runner created."
msgstr ""
msgid "Runner tokens"
msgstr ""
@ -37342,6 +37339,9 @@ msgstr ""
msgid "Runners|Runner authentication tokens will expire based on a set interval. They will automatically rotate once expired."
msgstr ""
msgid "Runners|Runner created."
msgstr ""
msgid "Runners|Runner description"
msgstr ""

View File

@ -55,8 +55,8 @@
"@gitlab/at.js": "1.5.7",
"@gitlab/favicon-overlay": "2.0.0",
"@gitlab/fonts": "^1.2.0",
"@gitlab/svgs": "3.20.0",
"@gitlab/ui": "56.0.0",
"@gitlab/svgs": "3.21.0",
"@gitlab/ui": "56.1.1",
"@gitlab/visual-review-tools": "1.7.3",
"@gitlab/web-ide": "0.0.1-dev-20230216131813",
"@rails/actioncable": "6.1.4-7",

View File

@ -0,0 +1,8 @@
install:
image: maven:3.6-jdk-11
script:
- "mvn install -U -s settings.xml"
only:
- "<%= imported_project.default_branch %>"
tags:
- "runner-for-<%= imported_project.group.name %>"

View File

@ -0,0 +1,23 @@
<settings>
<servers>
<server>
<id>central-proxy</id>
<configuration>
<httpHeaders>
<property>
<name>Private-Token</name>
<value><%= personal_access_token %></value>
</property>
</httpHeaders>
</configuration>
</server>
</servers>
<mirrors>
<mirror>
<id>central-proxy</id>
<name>GitLab proxy of central repo</name>
<url><%= gitlab_address_with_port %>/api/v4/groups/<%= imported_project.group.id %>/-/packages/maven</url>
<mirrorOf>central</mirrorOf>
</mirror>
</mirrors>
</settings>

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Package', :orchestrated, :packages, :object_storage, :reliable, product_group: :package_registry do
RSpec.describe 'Package', :orchestrated, :packages, :object_storage, :reliable, product_group: :package_registry, feature_flag: { name: 'maven_central_request_forwarding', scope: :global } do
describe 'Maven group level endpoint' do
include Runtime::Fixtures
include Support::Helpers::MaskToken
@ -223,5 +223,94 @@ module QA
end
end
end
describe 'Maven request forwarding' do
include Runtime::Fixtures
let(:group_id) { 'com.gitlab.qa' }
let(:artifact_id) { "maven-#{SecureRandom.hex(8)}" }
let(:package_name) { "#{group_id}/#{artifact_id}".tr('.', '/') }
let(:package_version) { '1.3.7' }
let(:personal_access_token) { Runtime::Env.personal_access_token }
let(:group) { Resource::Group.fabricate_via_api! }
let(:imported_project) do
Resource::ProjectImportedFromURL.fabricate_via_browser_ui! do |project|
project.name = "maven_imported_project"
project.group = group
project.gitlab_repository_path = 'https://gitlab.com/gitlab-org/quality/imported-projects/maven.git'
end
end
let(:gitlab_address_with_port) do
uri = URI.parse(Runtime::Scenario.gitlab_address)
"#{uri.scheme}://#{uri.host}:#{uri.port}"
end
let(:package) do
Resource::Package.init do |package|
package.name = package_name
package.project = imported_project
end
end
let(:runner) do
Resource::ProjectRunner.fabricate! do |runner|
runner.name = "qa-runner-#{Time.now.to_i}"
runner.tags = ["runner-for-#{imported_project.group.name}"]
runner.executor = :docker
runner.token = imported_project.group.reload!.runners_token
end
end
before do
Runtime::Feature.enable(:maven_central_request_forwarding)
Flow::Login.sign_in_unless_signed_in
imported_project
runner
end
after do
Runtime::Feature.disable(:maven_central_request_forwarding)
runner.remove_via_api!
package.remove_via_api!
imported_project.remove_via_api!
end
it(
'uses GitLab as a mirror of the central proxy',
:skip_live_env,
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/375988'
) do
Support::Retrier.retry_on_exception(max_attempts: 3, sleep_interval: 2) do
Resource::Repository::Commit.fabricate_via_api! do |commit|
settings_xml = ERB.new(read_fixture('package_managers/maven/group/consumer/request_forwarding', 'settings.xml.erb')).result(binding)
gitlab_ci_yaml = ERB.new(read_fixture('package_managers/maven/group/consumer/request_forwarding', 'gitlab_ci.yaml.erb')).result(binding)
commit.project = imported_project
commit.commit_message = 'Add files'
commit.add_files(
[
{ file_path: '.gitlab-ci.yml', content: gitlab_ci_yaml },
{ file_path: 'settings.xml', content: settings_xml }
])
end
end
imported_project.visit!
Flow::Pipeline.visit_latest_pipeline
Page::Project::Pipeline::Show.perform do |pipeline|
pipeline.click_job('install')
end
Page::Project::Job::Show.perform do |job|
expect(job).to be_successful(timeout: 800)
end
end
end
end
end

View File

@ -151,51 +151,57 @@ RSpec.describe QA::Specs::Helpers::FeatureFlag do
it_behaves_like 'skips with given feature flag metadata', { name: 'global_ff', scope: :global }
end
context 'when run on jh production', skip: 'https://gitlab.com/gitlab-org/gitlab/-/issues/392832' do
context 'when run on jh production mainland' do
before do
allow(GitlabEdition).to receive(:jh?).and_return(true)
end
context 'when on mainland' do
before(:context) do
QA::Runtime::Scenario.define(:gitlab_address, 'https://jihulab.com')
end
before(:context) do
QA::Runtime::Scenario.define(:gitlab_address, 'https://jihulab.com')
end
context 'when no scope is defined' do
it_behaves_like 'skips with given feature flag metadata', { name: 'no_scope_ff' }
context 'when no scope is defined' do
it_behaves_like 'skips with given feature flag metadata', { name: 'no_scope_ff' }
context 'for only one test in the example group' do
it 'only skips specified test and runs all others' do
group = describe_successfully 'Feature flag set for one test' do
it('is skipped', feature_flag: { name: 'single_test_ff' }) {}
it('passes') {}
end
expect(group.examples[0].execution_result.status).to eq(:pending)
expect(group.examples[1].execution_result.status).to eq(:passed)
context 'for only one test in the example group' do
it 'only skips specified test and runs all others' do
group = describe_successfully 'Feature flag set for one test' do
it('is skipped', feature_flag: { name: 'single_test_ff' }) {}
it('passes') {}
end
expect(group.examples[0].execution_result.status).to eq(:pending)
expect(group.examples[1].execution_result.status).to eq(:passed)
end
end
end
context 'when on hk' do
before(:context) do
QA::Runtime::Scenario.define(:gitlab_address, 'https://jihulab.hk')
end
it_behaves_like 'skips with given feature flag metadata', { name: 'actor_ff', scope: :project }
context 'when no scope is defined' do
it_behaves_like 'skips with given feature flag metadata', { name: 'no_scope_ff' }
it_behaves_like 'skips with given feature flag metadata', { name: 'global_ff', scope: :global }
end
context 'for only one test in the example group' do
it 'only skips specified test and runs all others' do
group = describe_successfully 'Feature flag set for one test' do
it('is skipped', feature_flag: { name: 'single_test_ff' }) {}
it('passes') {}
end
context 'when run on jh production hk' do
before do
allow(GitlabEdition).to receive(:jh?).and_return(true)
end
expect(group.examples[0].execution_result.status).to eq(:pending)
expect(group.examples[1].execution_result.status).to eq(:passed)
before(:context) do
QA::Runtime::Scenario.define(:gitlab_address, 'https://jihulab.hk')
end
context 'when no scope is defined' do
it_behaves_like 'skips with given feature flag metadata', { name: 'no_scope_ff' }
context 'for only one test in the example group' do
it 'only skips specified test and runs all others' do
group = describe_successfully 'Feature flag set for one test' do
it('is skipped', feature_flag: { name: 'single_test_ff' }) {}
it('passes') {}
end
expect(group.examples[0].execution_result.status).to eq(:pending)
expect(group.examples[1].execution_result.status).to eq(:passed)
end
end
end

View File

@ -345,11 +345,23 @@ RSpec.describe Projects::BlobController do
end
end
it_behaves_like 'tracking unique hll events' do
context 'events tracking' do
let(:target_event) { 'g_edit_by_sfe' }
subject(:request) { put :update, params: default_params }
let(:target_event) { 'g_edit_by_sfe' }
let(:expected_value) { instance_of(Integer) }
it_behaves_like 'tracking unique hll events' do
let(:expected_value) { instance_of(Integer) }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:action) { 'perform_sfe_action' }
let(:category) { described_class.to_s }
let(:namespace) { project.namespace.reload }
let(:property) { target_event }
let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_sfe_edit' }
let(:feature_flag_name) { 'route_hll_to_snowplow_phase4' }
end
end
end
@ -477,6 +489,7 @@ RSpec.describe Projects::BlobController do
describe 'POST create' do
let(:user) { create(:user) }
let(:target_event) { 'g_edit_by_sfe' }
let(:default_params) do
{
namespace_id: project.namespace,
@ -498,10 +511,18 @@ RSpec.describe Projects::BlobController do
subject(:request) { post :create, params: default_params }
it_behaves_like 'tracking unique hll events' do
let(:target_event) { 'g_edit_by_sfe' }
let(:expected_value) { instance_of(Integer) }
end
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:action) { 'perform_sfe_action' }
let(:category) { described_class.to_s }
let(:namespace) { project.namespace }
let(:property) { target_event }
let(:label) { 'usage_activity_by_stage_monthly.create.action_monthly_active_users_sfe_edit' }
let(:feature_flag_name) { 'route_hll_to_snowplow_phase4' }
end
it 'redirects to blob' do
request

View File

@ -91,23 +91,4 @@ RSpec.describe 'User changes public project visibility', :js, feature_category:
it_behaves_like 'does not require confirmation'
end
context 'with unlink_fork_network_upon_visibility_decrease = false' do
let(:project) { create(:project, :empty_repo, :public) }
before do
stub_feature_flags(unlink_fork_network_upon_visibility_decrease: false)
fork_project(project, project.first_owner)
sign_in(project.first_owner)
visit edit_project_path(project)
# https://gitlab.com/gitlab-org/gitlab/-/issues/381259
allow(Gitlab::QueryLimiting::Transaction).to receive(:threshold).and_return(110)
end
it_behaves_like 'does not require confirmation'
end
end

View File

@ -1,6 +1,8 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlSprintf } from '@gitlab/ui';
import { s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { createAlert, VARIANT_SUCCESS } from '~/flash';
@ -85,7 +87,7 @@ describe('AdminNewRunnerApp', () => {
it('pushes an alert to be shown after redirection', () => {
expect(saveAlertToLocalStorage).toHaveBeenCalledWith({
message: expect.any(String),
message: s__('Runners|Runner created.'),
variant: VARIANT_SUCCESS,
});
});

View File

@ -1,61 +0,0 @@
# frozen_string_literal: true
require "spec_helper"
RSpec.describe Analytics::CycleAnalyticsHelper do
describe '#cycle_analytics_initial_data' do
let(:user) { create(:user, name: 'fake user', username: 'fake_user') }
let(:image_path_keys) { [:empty_state_svg_path, :no_data_svg_path, :no_access_svg_path] }
let(:api_path_keys) { [:milestones_path, :labels_path] }
let(:additional_data_keys) { [:full_path, :group_id, :group_path, :project_id, :request_path] }
let(:group) { create(:group) }
subject(:cycle_analytics_data) { helper.cycle_analytics_initial_data(project, group) }
before do
project.add_maintainer(user)
end
context 'when a group is present' do
let(:project) { create(:project, group: group) }
it "sets the correct data keys" do
expect(cycle_analytics_data.keys)
.to match_array(api_path_keys + image_path_keys + additional_data_keys)
end
it "sets group paths" do
expect(cycle_analytics_data)
.to include({
full_path: project.full_path,
group_path: "/#{project.namespace.name}",
group_id: project.namespace.id,
request_path: "/#{project.full_path}/-/value_stream_analytics",
milestones_path: "/groups/#{group.name}/-/milestones.json",
labels_path: "/groups/#{group.name}/-/labels.json"
})
end
end
context 'when a group is not present' do
let(:group) { nil }
let(:project) { create(:project) }
it "sets the correct data keys" do
expect(cycle_analytics_data.keys)
.to match_array(image_path_keys + api_path_keys + additional_data_keys)
end
it "sets project name space paths" do
expect(cycle_analytics_data)
.to include({
full_path: project.full_path,
group_path: project.namespace.path,
group_id: project.namespace.id,
request_path: "/#{project.full_path}/-/value_stream_analytics",
milestones_path: "/#{project.full_path}/-/milestones.json",
labels_path: "/#{project.full_path}/-/labels.json"
})
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::RequestParams, feature_category: :value_stream_management do
it_behaves_like 'unlicensed cycle analytics request params' do
let_it_be(:user) { create(:user) }
let_it_be(:root_group) { create(:group) }
let_it_be(:project) { create(:project, group: root_group) }
let(:namespace) { project.project_namespace }
describe 'project-level data attributes' do
subject(:attributes) { described_class.new(params).to_data_attributes }
it 'includes the namespace attribute' do
expect(attributes).to match(hash_including({
namespace: {
name: project.name,
full_path: project.full_path,
avatar_url: project.avatar_url
}
}))
end
end
end
end

View File

@ -2,7 +2,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::PostgresPartition, type: :model do
RSpec.describe Gitlab::Database::PostgresPartition, type: :model, feature_category: :database do
let(:current_schema) { ActiveRecord::Base.connection.select_value("SELECT current_schema()") }
let(:schema) { 'gitlab_partitions_dynamic' }
let(:name) { '_test_partition_01' }
let(:identifier) { "#{schema}.#{name}" }
@ -56,9 +57,20 @@ RSpec.describe Gitlab::Database::PostgresPartition, type: :model do
expect(partitions.pluck(:name)).to eq([name, second_name])
end
it 'returns the partitions if the parent table schema is included in the table name' do
partitions = described_class.for_parent_table("#{current_schema}._test_partitioned_table")
expect(partitions.count).to eq(2)
expect(partitions.pluck(:name)).to eq([name, second_name])
end
it 'does not return partitions for tables not in the current schema' do
expect(described_class.for_parent_table('_test_other_table').count).to eq(0)
end
it 'does not return partitions for tables if the schema is not the current' do
expect(described_class.for_parent_table('foo_bar._test_partitioned_table').count).to eq(0)
end
end
describe '#parent_identifier' do

View File

@ -2,20 +2,38 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base, :delete, :silence_stdout,
:suppress_gitlab_schemas_validate_connection, feature_category: :pods do
let(:detached_partition_table) { '_test_gitlab_main_part_20220101' }
let(:lock_writes_manager) do
RSpec.describe Gitlab::Database::TablesLocker, :suppress_gitlab_schemas_validate_connection, :silence_stdout,
feature_category: :pods do
let(:default_lock_writes_manager) do
instance_double(Gitlab::Database::LockWritesManager, lock_writes: nil, unlock_writes: nil)
end
before do
allow(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(lock_writes_manager)
allow(Gitlab::Database::LockWritesManager).to receive(:new).with(any_args).and_return(default_lock_writes_manager)
# Limiting the scope of the tests to a subset of the database tables
allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return({
'application_settings' => :gitlab_main_clusterwide,
'projects' => :gitlab_main,
'security_findings' => :gitlab_main,
'ci_builds' => :gitlab_ci,
'ci_jobs' => :gitlab_ci,
'loose_foreign_keys_deleted_records' => :gitlab_shared,
'ar_internal_metadata' => :gitlab_internal
})
end
before(:all) do
create_partition_sql = <<~SQL
CREATE TABLE IF NOT EXISTS #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.security_findings_test_partition
PARTITION OF security_findings
FOR VALUES IN (0)
SQL
ApplicationRecord.connection.execute(create_partition_sql)
Ci::ApplicationRecord.connection.execute(create_partition_sql)
create_detached_partition_sql = <<~SQL
CREATE TABLE IF NOT EXISTS gitlab_partitions_dynamic._test_gitlab_main_part_20220101 (
CREATE TABLE IF NOT EXISTS gitlab_partitions_dynamic._test_gitlab_main_part_202201 (
id bigserial primary key not null
)
SQL
@ -31,33 +49,73 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base
end
end
after(:all) do
drop_detached_partition_sql = <<~SQL
DROP TABLE IF EXISTS gitlab_partitions_dynamic._test_gitlab_main_part_20220101
SQL
shared_examples "lock tables" do |gitlab_schema, database_name|
let(:connection) { Gitlab::Database.database_base_models[database_name].connection }
let(:tables_to_lock) do
Gitlab::Database::GitlabSchema
.tables_to_schema.filter_map { |table_name, schema| table_name if schema == gitlab_schema }
end
ApplicationRecord.connection.execute(drop_detached_partition_sql)
Ci::ApplicationRecord.connection.execute(drop_detached_partition_sql)
it "locks table in schema #{gitlab_schema} and database #{database_name}" do
expect(tables_to_lock).not_to be_empty
Gitlab::Database::SharedModel.using_connection(ApplicationRecord.connection) do
Postgresql::DetachedPartition.delete_all
tables_to_lock.each do |table_name|
lock_writes_manager = instance_double(Gitlab::Database::LockWritesManager, lock_writes: nil)
expect(Gitlab::Database::LockWritesManager).to receive(:new).with(
table_name: table_name,
connection: connection,
database_name: database_name,
with_retries: true,
logger: anything,
dry_run: anything
).once.and_return(lock_writes_manager)
expect(lock_writes_manager).to receive(:lock_writes).once
end
subject
end
end
shared_examples "lock tables" do |table_schema, database_name|
let(:table_name) do
shared_examples "unlock tables" do |gitlab_schema, database_name|
let(:connection) { Gitlab::Database.database_base_models[database_name].connection }
let(:tables_to_unlock) do
Gitlab::Database::GitlabSchema
.tables_to_schema.filter_map { |table_name, schema| table_name if schema == table_schema }
.first
.tables_to_schema.filter_map { |table_name, schema| table_name if schema == gitlab_schema }
end
let(:database) { database_name }
it "unlocks table in schema #{gitlab_schema} and database #{database_name}" do
expect(tables_to_unlock).not_to be_empty
tables_to_unlock.each do |table_name|
lock_writes_manager = instance_double(Gitlab::Database::LockWritesManager, unlock_writes: nil)
expect(Gitlab::Database::LockWritesManager).to receive(:new).with(
table_name: table_name,
connection: anything,
database_name: database_name,
with_retries: true,
logger: anything,
dry_run: anything
).once.and_return(lock_writes_manager)
expect(lock_writes_manager).to receive(:unlock_writes)
end
subject
end
end
shared_examples "lock attached partitions" do |partition_identifier, database_name|
let(:connection) { Gitlab::Database.database_base_models[database_name].connection }
it 'locks the partition' do
lock_writes_manager = instance_double(Gitlab::Database::LockWritesManager, lock_writes: nil)
it "locks table in schema #{table_schema} and database #{database_name}" do
expect(Gitlab::Database::LockWritesManager).to receive(:new).with(
table_name: table_name,
connection: anything,
database_name: database,
table_name: partition_identifier,
connection: connection,
database_name: database_name,
with_retries: true,
logger: anything,
dry_run: anything
@ -68,20 +126,16 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base
end
end
shared_examples "unlock tables" do |table_schema, database_name|
let(:table_name) do
Gitlab::Database::GitlabSchema
.tables_to_schema.filter_map { |table_name, schema| table_name if schema == table_schema }
.first
end
shared_examples "unlock attached partitions" do |partition_identifier, database_name|
let(:connection) { Gitlab::Database.database_base_models[database_name].connection }
let(:database) { database_name }
it 'unlocks the partition' do
lock_writes_manager = instance_double(Gitlab::Database::LockWritesManager, unlock_writes: nil)
it "unlocks table in schema #{table_schema} and database #{database_name}" do
expect(Gitlab::Database::LockWritesManager).to receive(:new).with(
table_name: table_name,
connection: anything,
database_name: database,
table_name: partition_identifier,
connection: connection,
database_name: database_name,
with_retries: true,
logger: anything,
dry_run: anything
@ -100,25 +154,29 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base
describe '#lock_writes' do
subject { described_class.new.lock_writes }
it 'does not lock any table' do
expect(Gitlab::Database::LockWritesManager).to receive(:new)
.with(any_args).and_return(default_lock_writes_manager)
expect(default_lock_writes_manager).not_to receive(:lock_writes)
subject
end
it_behaves_like 'unlock tables', :gitlab_main, 'main'
it_behaves_like 'unlock tables', :gitlab_ci, 'main'
it_behaves_like 'unlock tables', :gitlab_main_clusterwide, 'main'
it_behaves_like 'unlock tables', :gitlab_shared, 'main'
it_behaves_like 'unlock tables', :gitlab_internal, 'main'
end
describe '#unlock_writes' do
subject { described_class.new.lock_writes }
it 'does call Gitlab::Database::LockWritesManager.unlock_writes' do
expect(Gitlab::Database::LockWritesManager).to receive(:new)
.with(any_args).and_return(default_lock_writes_manager)
expect(default_lock_writes_manager).to receive(:unlock_writes)
expect(default_lock_writes_manager).not_to receive(:lock_writes)
subject
end
@ -133,43 +191,53 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base
describe '#lock_writes' do
subject { described_class.new.lock_writes }
include_examples "lock tables", :gitlab_ci, 'main'
include_examples "lock tables", :gitlab_main, 'ci'
it_behaves_like 'lock tables', :gitlab_ci, 'main'
it_behaves_like 'lock tables', :gitlab_main, 'ci'
it_behaves_like 'lock tables', :gitlab_main_clusterwide, 'ci'
include_examples "unlock tables", :gitlab_main, 'main'
include_examples "unlock tables", :gitlab_ci, 'ci'
include_examples "unlock tables", :gitlab_shared, 'main'
include_examples "unlock tables", :gitlab_shared, 'ci'
include_examples "unlock tables", :gitlab_internal, 'main'
include_examples "unlock tables", :gitlab_internal, 'ci'
it_behaves_like 'unlock tables', :gitlab_main_clusterwide, 'main'
it_behaves_like 'unlock tables', :gitlab_main, 'main'
it_behaves_like 'unlock tables', :gitlab_ci, 'ci'
it_behaves_like 'unlock tables', :gitlab_shared, 'main'
it_behaves_like 'unlock tables', :gitlab_shared, 'ci'
it_behaves_like 'unlock tables', :gitlab_internal, 'main'
it_behaves_like 'unlock tables', :gitlab_internal, 'ci'
gitlab_main_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.security_findings_test_partition"
it_behaves_like 'unlock attached partitions', gitlab_main_partition, 'main'
it_behaves_like 'lock attached partitions', gitlab_main_partition, 'ci'
end
describe '#unlock_writes' do
subject { described_class.new.unlock_writes }
include_examples "unlock tables", :gitlab_ci, 'main'
include_examples "unlock tables", :gitlab_main, 'ci'
include_examples "unlock tables", :gitlab_main, 'main'
include_examples "unlock tables", :gitlab_ci, 'ci'
include_examples "unlock tables", :gitlab_shared, 'main'
include_examples "unlock tables", :gitlab_shared, 'ci'
include_examples "unlock tables", :gitlab_internal, 'main'
include_examples "unlock tables", :gitlab_internal, 'ci'
it_behaves_like "unlock tables", :gitlab_ci, 'main'
it_behaves_like "unlock tables", :gitlab_main, 'ci'
it_behaves_like "unlock tables", :gitlab_main, 'main'
it_behaves_like "unlock tables", :gitlab_ci, 'ci'
it_behaves_like "unlock tables", :gitlab_shared, 'main'
it_behaves_like "unlock tables", :gitlab_shared, 'ci'
it_behaves_like "unlock tables", :gitlab_internal, 'main'
it_behaves_like "unlock tables", :gitlab_internal, 'ci'
gitlab_main_partition = "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.security_findings_test_partition"
it_behaves_like 'unlock attached partitions', gitlab_main_partition, 'main'
it_behaves_like 'unlock attached partitions', gitlab_main_partition, 'ci'
end
context 'when running in dry_run mode' do
subject { described_class.new(dry_run: true).lock_writes }
it 'passes dry_run flag to LockWritesManager' do
expect(Gitlab::Database::LockWritesManager).to receive(:new).with(
table_name: 'security_findings',
connection: anything,
database_name: 'ci',
with_retries: true,
logger: anything,
dry_run: true
).and_return(default_lock_writes_manager)
expect(default_lock_writes_manager).to receive(:lock_writes)
subject
end
@ -185,8 +253,9 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base
end
it 'does not lock any tables if the ci database is shared with main database' do
expect(Gitlab::Database::LockWritesManager).to receive(:new)
.with(any_args).and_return(default_lock_writes_manager)
expect(default_lock_writes_manager).not_to receive(:lock_writes)
subject
end
@ -220,7 +289,3 @@ RSpec.describe Gitlab::Database::TablesLocker, :reestablished_active_record_base
end
end
end
def number_of_triggers(connection)
connection.select_value("SELECT count(*) FROM information_schema.triggers")
end

View File

@ -0,0 +1,80 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Patch::NodeLoader, feature_category: :redis do
using RSpec::Parameterized::TableSyntax
describe '#fetch_node_info' do
let(:redis) { double(:redis) } # rubocop:disable RSpec/VerifiedDoubles
# rubocop:disable Naming/InclusiveLanguage
where(:case_name, :args, :value) do
[
[
'when only ip address is present',
"07c37df 127.0.0.1:30004@31004 slave e7d1eec 0 1426238317239 4 connected
67ed2db 127.0.0.1:30002@31002 master - 0 1426238316232 2 connected 5461-10922
292f8b3 127.0.0.1:30003@31003 master - 0 1426238318243 3 connected 10923-16383
6ec2392 127.0.0.1:30005@31005 slave 67ed2db 0 1426238316232 5 connected
824fe11 127.0.0.1:30006@31006 slave 292f8b3 0 1426238317741 6 connected
e7d1eec 127.0.0.1:30001@31001 myself,master - 0 0 1 connected 0-5460",
{
'127.0.0.1:30004' => 'slave', '127.0.0.1:30002' => 'master', '127.0.0.1:30003' => 'master',
'127.0.0.1:30005' => 'slave', '127.0.0.1:30006' => 'slave', '127.0.0.1:30001' => 'master'
}
],
[
'when hostname is present',
"07c37df 127.0.0.1:30004@31004,host1 slave e7d1eec 0 1426238317239 4 connected
67ed2db 127.0.0.1:30002@31002,host2 master - 0 1426238316232 2 connected 5461-10922
292f8b3 127.0.0.1:30003@31003,host3 master - 0 1426238318243 3 connected 10923-16383
6ec2392 127.0.0.1:30005@31005,host4 slave 67ed2db 0 1426238316232 5 connected
824fe11 127.0.0.1:30006@31006,host5 slave 292f8b3 0 1426238317741 6 connected
e7d1eec 127.0.0.1:30001@31001,host6 myself,master - 0 0 1 connected 0-5460",
{
'host1:30004' => 'slave', 'host2:30002' => 'master', 'host3:30003' => 'master',
'host4:30005' => 'slave', 'host5:30006' => 'slave', 'host6:30001' => 'master'
}
],
[
'when auxiliary fields are present',
"07c37df 127.0.0.1:30004@31004,,shard-id=69bc slave e7d1eec 0 1426238317239 4 connected
67ed2db 127.0.0.1:30002@31002,,shard-id=114f master - 0 1426238316232 2 connected 5461-10922
292f8b3 127.0.0.1:30003@31003,,shard-id=fdb3 master - 0 1426238318243 3 connected 10923-16383
6ec2392 127.0.0.1:30005@31005,,shard-id=114f slave 67ed2db 0 1426238316232 5 connected
824fe11 127.0.0.1:30006@31006,,shard-id=fdb3 slave 292f8b3 0 1426238317741 6 connected
e7d1eec 127.0.0.1:30001@31001,,shard-id=69bc myself,master - 0 0 1 connected 0-5460",
{
'127.0.0.1:30004' => 'slave', '127.0.0.1:30002' => 'master', '127.0.0.1:30003' => 'master',
'127.0.0.1:30005' => 'slave', '127.0.0.1:30006' => 'slave', '127.0.0.1:30001' => 'master'
}
],
[
'when hostname and auxiliary fields are present',
"07c37df 127.0.0.1:30004@31004,host1,shard-id=69bc slave e7d1eec 0 1426238317239 4 connected
67ed2db 127.0.0.1:30002@31002,host2,shard-id=114f master - 0 1426238316232 2 connected 5461-10922
292f8b3 127.0.0.1:30003@31003,host3,shard-id=fdb3 master - 0 1426238318243 3 connected 10923-16383
6ec2392 127.0.0.1:30005@31005,host4,shard-id=114f slave 67ed2db 0 1426238316232 5 connected
824fe11 127.0.0.1:30006@31006,host5,shard-id=fdb3 slave 292f8b3 0 1426238317741 6 connected
e7d1eec 127.0.0.1:30001@31001,host6,shard-id=69bc myself,master - 0 0 1 connected 0-5460",
{
'host1:30004' => 'slave', 'host2:30002' => 'master', 'host3:30003' => 'master',
'host4:30005' => 'slave', 'host5:30006' => 'slave', 'host6:30001' => 'master'
}
]
]
end
# rubocop:enable Naming/InclusiveLanguage
with_them do
before do
allow(redis).to receive(:call).with([:cluster, :nodes]).and_return(args)
end
it do
expect(Redis::Cluster::NodeLoader.load_flags([redis])).to eq(value)
end
end
end
end
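The fixtures above all reduce to the same shape: each CLUSTER NODES row is split into an address field (ip:port@cport, optionally followed by a hostname and aux fields) and a flags field, and the announced hostname wins over the IP when it is present. Below is a minimal standalone sketch of that mapping; it assumes a hypothetical parse_cluster_nodes helper and is not the patched Redis::Cluster::NodeLoader implementation.

# Illustrative sketch only; not the actual Gitlab::Patch::NodeLoader code.
def parse_cluster_nodes(raw)
  raw.each_line.filter_map do |line|
    fields = line.split
    next if fields.size < 3

    address, flags = fields[1], fields[2]
    host_port, hostname = address.split(',')          # "ip:port@cport[,hostname[,aux=value]...]"
    ip, port = host_port.split('@').first.split(':')  # drop the cluster bus port
    host = hostname.to_s.empty? ? ip : hostname       # prefer the hostname when announced

    ["#{host}:#{port}", flags.include?('master') ? 'master' : 'slave']
  end.to_h
end

Feeding the first fixture above through this sketch should yield the same hash as the expected value column in the table.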

View File

@ -25,12 +25,29 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
end
it 'track snowplow event' do
track_action(author: user1, project: project)
expect_snowplow_event(
category: described_class.name,
action: 'ide_edit',
label: 'usage_activity_by_stage_monthly.create.action_monthly_active_users_ide_edit',
namespace: project.namespace,
property: event_name,
project: project,
user: user1,
context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll, event: event_name).to_h]
)
end
it 'does not track edit actions if author is not present' do
expect(track_action(author: nil, project: project)).to be_nil
end
end
context 'for web IDE edit actions' do
let(:event_name) { described_class::EDIT_BY_WEB_IDE }
it_behaves_like 'tracks and counts action' do
def track_action(params)
described_class.track_web_ide_edit_action(**params)
@ -43,6 +60,8 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
context 'for SFE edit actions' do
let(:event_name) { described_class::EDIT_BY_SFE }
it_behaves_like 'tracks and counts action' do
def track_action(params)
described_class.track_sfe_edit_action(**params)
@ -55,6 +74,8 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
end
context 'for snippet editor edit actions' do
let(:event_name) { described_class::EDIT_BY_SNIPPET_EDITOR }
it_behaves_like 'tracks and counts action' do
def track_action(params)
described_class.track_snippet_editor_edit_action(**params)

View File

@ -141,6 +141,18 @@ RSpec.describe Sidebars::Projects::Menus::InfrastructureMenu do
it_behaves_like 'access rights checks'
end
end
context 'when instance is not configured for Google OAuth2' do
before do
stub_feature_flags(incubation_5mp_google_cloud: true)
unconfigured_google_oauth2 = Struct.new(:app_id, :app_secret).new('', '')
allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for)
.with('google_oauth2')
.and_return(unconfigured_google_oauth2)
end
it { is_expected.to be_nil }
end
end
end
end

View File

@ -54,9 +54,8 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
end
describe '#heartbeat', :freeze_time do
let(:runner_machine) { create(:ci_runner_machine, version: '15.0.0') }
let(:executor) { 'shell' }
let(:values) do
{
ip_address: '8.8.8.8',
@ -76,18 +75,26 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
runner_machine.contacted_at = Time.current
end
context 'when version is changed' do
let(:version) { '15.0.1' }
before do
allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version)
end
it 'schedules version information update' do
heartbeat
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once
end
it 'updates cache' do
expect_redis_update
heartbeat
expect(runner_machine.runner_version).to be_nil
end
end
context 'with only ip_address specified' do
@ -96,12 +103,7 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
end
it 'updates only ip_address' do
expect_redis_update(values.merge(contacted_at: Time.current))
heartbeat
end
@ -112,17 +114,29 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
before do
runner_machine.contacted_at = 2.hours.ago
allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version)
end
context 'when version is changed' do
let(:version) { '15.0.1' }
context 'with invalid runner_machine' do
before do
runner_machine.runner = nil
end
it 'still updates redis cache and database' do
expect(runner_machine).to be_invalid
expect_redis_update
does_db_update
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async)
.with(version).once
end
end
it 'updates redis cache and database' do
expect_redis_update
does_db_update
@ -132,58 +146,52 @@ RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model
end
context 'with unchanged runner_machine version' do
let(:version) { runner_machine.version }
it 'does not schedule ci_runner_versions update' do
heartbeat
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async)
end
end
Ci::Runner::EXECUTOR_NAME_TO_TYPES.each_key do |executor|
context "with #{executor} executor" do
let(:executor) { executor }
it 'updates with expected executor type' do
expect_redis_update
heartbeat
expect(runner_machine.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
end
def expected_executor_type
executor.gsub(/[+-]/, '_')
end
end
end
context 'with an unknown executor type' do
let(:executor) { 'some-unknown-type' }
it 'updates with unknown executor type' do
expect_redis_update
heartbeat
expect(runner_machine.reload.read_attribute(:executor_type)).to eq('unknown')
end
end
end
def expect_redis_update(values = anything)
values_json = values == anything ? anything : Gitlab::Json.dump(values)
Gitlab::Redis::Cache.with do |redis|
redis_key = runner_machine.send(:cache_attribute_key)
expect(redis).to receive(:set).with(redis_key, values_json, any_args).and_call_original
end
end
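The reworked expect_redis_update helper takes an optional payload: with no argument it only asserts that something was written to the cache key, while passing a hash pins the exact JSON that gets serialized. A minimal usage sketch, mirroring the ip_address example above (illustrative only):

# Either accept any cached payload for this heartbeat ...
expect_redis_update
heartbeat

# ... or pin the exact attributes that should be serialized:
expect_redis_update(values.merge(contacted_at: Time.current))
heartbeat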

View File

@ -1076,13 +1076,13 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
end
end
describe '#heartbeat', :freeze_time do
let(:runner) { create(:ci_runner, :project, version: '15.0.0') }
let(:executor) { 'shell' }
let(:values) { { architecture: '18-bit', config: { gpus: "all" }, executor: executor, version: version } }
subject(:heartbeat) do
runner.heartbeat(values)
end
context 'when database was updated recently' do
@ -1090,27 +1090,35 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
runner.contacted_at = Time.current
end
context 'when version is changed' do
let(:version) { '15.0.1' }
before do
allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version)
end
it 'updates cache' do
expect_redis_update
heartbeat
expect(runner.runner_version).to be_nil
end
it 'schedules version information update' do
heartbeat
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once
end
end
context 'with only ip_address specified', :freeze_time do
let(:values) do
{ ip_address: '1.1.1.1' }
end
it 'updates only ip_address' do
expect_redis_update(values.merge(contacted_at: Time.current))
heartbeat
end
@ -1121,65 +1129,81 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do
before do
runner.contacted_at = 2.hours.ago
allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).with(version)
end
context 'when version is changed' do
let(:version) { '15.0.1' }
context 'with invalid runner' do
before do
runner.runner_projects.delete_all
end
it 'still updates redis cache and database' do
expect(runner).to be_invalid
expect_redis_update
does_db_update
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once
end
end
it 'updates redis cache and database' do
expect_redis_update
does_db_update
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).with(version).once
end
end
context 'with unchanged runner version' do
let(:version) { runner.version }
it 'does not schedule ci_runner_versions update' do
heartbeat
expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async)
end
end
Ci::Runner::EXECUTOR_NAME_TO_TYPES.each_key do |executor|
context "with #{executor} executor" do
let(:executor) { executor }
it 'updates with expected executor type' do
expect_redis_update
heartbeat
expect(runner.reload.read_attribute(:executor_type)).to eq(expected_executor_type)
end
def expected_executor_type
executor.gsub(/[+-]/, '_')
end
end
end
context 'with an unknown executor type' do
let(:executor) { 'some-unknown-type' }
it 'updates with unknown executor type' do
expect_redis_update
heartbeat
expect(runner.reload.read_attribute(:executor_type)).to eq('unknown')
end
end
end
end
def expect_redis_update(values = anything)
values_json = values == anything ? anything : Gitlab::Json.dump(values)
Gitlab::Redis::Cache.with do |redis|
redis_key = runner.send(:cache_attribute_key)
expect(redis).to receive(:set).with(redis_key, values_json, any_args).and_call_original
end
end

View File

@ -17,7 +17,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
let!(:draft_note_by_current_user) { create(:draft_note, merge_request: merge_request, author: user) }
let!(:draft_note_by_random_user) { create(:draft_note, merge_request: merge_request) }
let_it_be(:base_url) { "/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes" }
before do
project.add_developer(user)
@ -25,13 +25,13 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
describe "Get a list of merge request draft notes" do
it "returns 200 OK status" do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes", user)
get api(base_url, user)
expect(response).to have_gitlab_http_status(:ok)
end
it "returns only draft notes authored by the current user" do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes", user)
get api(base_url, user)
draft_note_ids = json_response.pluck("id")
@ -45,7 +45,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
context "when requesting an existing draft note by the user" do
before do
get api(
"/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes/#{draft_note_by_current_user.id}",
"#{base_url}/#{draft_note_by_current_user.id}",
user
)
end
@ -61,7 +61,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
context "when requesting a non-existent draft note" do
it "returns a 404 Not Found response" do
get api(
"/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes/#{DraftNote.last.id + 1}",
"#{base_url}/#{DraftNote.last.id + 1}",
user
)
@ -72,7 +72,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
context "when requesting an existing draft note by another user" do
it "returns a 404 Not Found response" do
get api(
"/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes/#{draft_note_by_random_user.id}",
"#{base_url}/#{draft_note_by_random_user.id}",
user
)
@ -88,7 +88,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
before do
delete api(
"/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes/#{draft_note_by_current_user.id}",
"#{base_url}/#{draft_note_by_current_user.id}",
user
)
end
@ -105,7 +105,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
context "when deleting a non-existent draft note" do
it "returns a 404 Not Found" do
delete api(
"/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes/#{non_existing_record_id}",
"#{base_url}/#{non_existing_record_id}",
user
)
@ -116,7 +116,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
context "when deleting a draft note by a different user" do
it "returns a 404 Not Found" do
delete api(
"/projects/#{project.id}/merge_requests/#{merge_request.iid}/draft_notes/#{draft_note_by_random_user.id}",
"#{base_url}/#{draft_note_by_random_user.id}",
user
)
@ -125,8 +125,8 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
end
end
def create_draft_note(params = {}, url = base_url)
post api(url, user), params: params
end
describe "Create a new draft note" do
@ -219,7 +219,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
describe "Publishing a draft note" do
let(:publish_draft_note) do
put api(
"#{api_stub}/draft_notes/#{draft_note_by_current_user.id}/publish",
"#{base_url}/#{draft_note_by_current_user.id}/publish",
user
)
end
@ -240,7 +240,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
context "when publishing a non-existent draft note" do
it "returns a 404 Not Found" do
put api(
"#{api_stub}/draft_notes/#{non_existing_record_id}/publish",
"#{base_url}/#{non_existing_record_id}/publish",
user
)
@ -251,7 +251,7 @@ RSpec.describe API::DraftNotes, feature_category: :code_review_workflow do
context "when publishing a draft note by a different user" do
it "returns a 404 Not Found" do
put api(
"#{api_stub}/draft_notes/#{draft_note_by_random_user.id}/publish",
"#{base_url}/#{draft_note_by_random_user.id}/publish",
user
)

View File

@ -227,48 +227,16 @@ RSpec.describe Projects::UpdateService do
let(:user) { project.first_owner }
let(:forked_project) { fork_project(project) }
it 'updates forks visibility level when parent set to more restrictive' do
opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE }
expect(project).to be_internal
expect(forked_project).to be_internal
expect(update_project(project, user, opts)).to eq({ status: :success })
expect(project).to be_private
expect(forked_project.reload).to be_private
end
it 'does not update forks visibility level when parent set to less restrictive' do
opts = { visibility_level: Gitlab::VisibilityLevel::PUBLIC }
expect(project).to be_internal
expect(forked_project).to be_internal
expect(update_project(project, user, opts)).to eq({ status: :success })
expect(project).to be_public
expect(forked_project.reload).to be_internal
end
end
end

View File

@ -957,7 +957,6 @@
- './ee/spec/helpers/ee/geo_helper_spec.rb'
- './ee/spec/helpers/ee/gitlab_routing_helper_spec.rb'
- './ee/spec/helpers/ee/graph_helper_spec.rb'
- './ee/spec/helpers/ee/groups/analytics/cycle_analytics_helper_spec.rb'
- './ee/spec/helpers/ee/groups/group_members_helper_spec.rb'
- './ee/spec/helpers/ee/groups_helper_spec.rb'
- './ee/spec/helpers/ee/groups/settings_helper_spec.rb'
@ -5084,7 +5083,6 @@
- './spec/helpers/admin/deploy_key_helper_spec.rb'
- './spec/helpers/admin/identities_helper_spec.rb'
- './spec/helpers/admin/user_actions_helper_spec.rb'
- './spec/helpers/analytics/cycle_analytics_helper_spec.rb'
- './spec/helpers/appearances_helper_spec.rb'
- './spec/helpers/application_helper_spec.rb'
- './spec/helpers/application_settings_helper_spec.rb'

View File

@ -0,0 +1,127 @@
# frozen_string_literal: true
RSpec.shared_examples 'unlicensed cycle analytics request params' do
let(:params) do
{
created_after: '2019-01-01',
created_before: '2019-03-01',
project_ids: [2, 3],
namespace: namespace,
current_user: user
}
end
subject { described_class.new(params) }
before do
root_group.add_owner(user)
end
describe 'validations' do
it 'is valid' do
expect(subject).to be_valid
end
context 'when `created_before` is missing' do
before do
params[:created_before] = nil
end
it 'is valid', time_travel_to: '2019-03-01' do
expect(subject).to be_valid
end
end
context 'when `created_before` is earlier than `created_after`' do
before do
params[:created_before] = '2015-01-01'
end
it 'is invalid' do
expect(subject).not_to be_valid
expect(subject.errors.messages[:created_before]).not_to be_empty
end
end
context 'when the date range exceeds 180 days' do
before do
params[:created_before] = '2019-07-15'
end
it 'is invalid' do
expect(subject).not_to be_valid
message = s_('CycleAnalytics|The given date range is larger than 180 days')
expect(subject.errors.messages[:created_after]).to include(message)
end
end
end
it 'casts `created_after` to `Time`' do
expect(subject.created_after).to be_a_kind_of(Time)
end
it 'casts `created_before` to `Time`' do
expect(subject.created_before).to be_a_kind_of(Time)
end
describe 'optional `value_stream`' do
context 'when `value_stream` is not empty' do
let(:value_stream) { instance_double('Analytics::CycleAnalytics::ValueStream') }
before do
params[:value_stream] = value_stream
end
it { expect(subject.value_stream).to eq(value_stream) }
end
context 'when `value_stream` is nil' do
before do
params[:value_stream] = nil
end
it { expect(subject.value_stream).to eq(nil) }
end
end
describe 'sorting params' do
before do
params.merge!(sort: 'duration', direction: 'asc')
end
it 'converts sorting params to symbol when passing it to data collector' do
data_collector_params = subject.to_data_collector_params
expect(data_collector_params[:sort]).to eq(:duration)
expect(data_collector_params[:direction]).to eq(:asc)
end
it 'adds sorting params to data attributes' do
data_attributes = subject.to_data_attributes
expect(data_attributes[:sort]).to eq('duration')
expect(data_attributes[:direction]).to eq('asc')
end
end
describe 'aggregation params' do
context 'when not licensed' do
it 'returns nil' do
data_collector_params = subject.to_data_attributes
expect(data_collector_params[:aggregation]).to eq(nil)
end
end
end
describe 'use_aggregated_data_collector param' do
subject(:value) { described_class.new(params).to_data_collector_params[:use_aggregated_data_collector] }
it { is_expected.to eq(false) }
end
describe 'enable_tasks_by_type_chart data attribute' do
subject(:value) { described_class.new(params).to_data_attributes[:enable_tasks_by_type_chart] }
it { is_expected.to eq(false) }
end
end
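These examples expect the including spec to provide root_group, user, and namespace, and they run against whatever described_class the outer example group defines. A minimal inclusion sketch follows; the params class and factory names are assumptions for illustration only:

# Illustrative usage; any request-params class exposing the same validations would do.
RSpec.describe Gitlab::Analytics::CycleAnalytics::RequestParams do
  it_behaves_like 'unlicensed cycle analytics request params' do
    let_it_be(:root_group) { create(:group) }
    let_it_be(:user) { create(:user) }

    let(:namespace) { root_group }
  end
end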

View File

@ -1331,19 +1331,19 @@
stylelint-declaration-strict-value "1.8.0"
stylelint-scss "4.2.0"
"@gitlab/svgs@3.20.0":
version "3.20.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.20.0.tgz#4ee4f2f24304d13ccce58f82c2ecd87e556f35b4"
integrity sha512-nYTF4j5kon4XbBr/sAzuubgxjIne9+RTZLmSrSaL9FL4eyuv9aa7YMCcOrlIbYX5jlSYlcD+ck2F2M1sqXXOBA==
"@gitlab/svgs@3.21.0":
version "3.21.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-3.21.0.tgz#a939b7ee4f766d793643ddcab7722fc549fa0e20"
integrity sha512-sWQOGhprA0RDEATHqo4ReHaGHKrpsswBvFMyM9ghd6OF6JbRdOwbLqFKg0qGHRkaPTg8uWLC7i2XfN7xlZAKfQ==
"@gitlab/ui@56.0.0":
version "56.0.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-56.0.0.tgz#fd3b144893b01a8eae408ce8ca4477240aeb493e"
integrity sha512-kuoI+q5zYmmK9stpb1YJKvJNTD+NXiG6EOCw5+UQDpo5PgeDzrQ4vm7JuocYU9dNZlHMR2vVPMxi3uG0+y5JkA==
"@gitlab/ui@56.1.1":
version "56.1.1"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-56.1.1.tgz#d696037356b5c9e6160e1e7bfd2a154991e1e418"
integrity sha512-1bczlu+e0VKfGlDKEMrJOCJhRfi3adFywlxT8R9lwpFVCOh1RTfEZeJ46VCATFxmUuqtjoFQ5xIvFTDv/DLk0A==
dependencies:
"@popperjs/core" "^2.11.2"
bootstrap-vue "2.20.1"
dompurify "^2.4.3"
dompurify "^2.4.4"
echarts "^5.3.2"
iframe-resizer "^4.3.2"
lodash "^4.17.20"
@ -5322,7 +5322,7 @@ dompurify@2.3.8:
resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.8.tgz#224fe9ae57d7ebd9a1ae1ac18c1c1ca3f532226f"
integrity sha512-eVhaWoVibIzqdGYjwsBWodIQIaXFSB+cKDf4cfxLMsK0xiud6SE+/WCVx/Xw/UwQsa4cS3T2eITcdtmTg2UKcw==
dompurify@^2.4.4:
version "2.4.4"
resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.4.4.tgz#c17803931dd524e1b68e0e940a84567f9498f4bd"
integrity sha512-1e2SpqHiRx4DPvmRuXU5J0di3iQACwJM+mFGE2HAkkK7Tbnfk9WcghcAmyWc9CRrjyRRUpmuhPUH6LphQQR3EQ==