Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-05-02 21:09:12 +00:00
parent 63fbe648bb
commit b6a2658061
51 changed files with 369 additions and 553 deletions

View File

@ -1,5 +1,5 @@
import { spriteIcon } from '~/lib/utils/common_utils';
import { sprintf, s__ } from '~/locale';
import { sprintf, s__, n__ } from '~/locale';
import { LOADING, ERROR, SUCCESS, STATUS_NOT_FOUND } from '../../constants';
export const hasCodequalityIssues = (state) =>
@ -29,9 +29,17 @@ export const codequalityText = (state) => {
},
);
} else if (resolvedIssues.length) {
text = s__(`ciReport|Code quality improved`);
text = n__(
`ciReport|Code quality improved due to 1 resolved issue`,
`ciReport|Code quality improved due to %d resolved issues`,
resolvedIssues.length,
);
} else if (newIssues.length) {
text = s__(`ciReport|Code quality degraded`);
text = n__(
`ciReport|Code quality degraded due to 1 new issue`,
`ciReport|Code quality degraded due to %d new issues`,
newIssues.length,
);
}
return text;
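The switch from `s__` to `n__` selects a singular or plural message by count. A minimal Ruby sketch of the same two-form selection (illustrative only; the real backend helper comes from gettext, and the helper name here just mirrors the frontend one):

```ruby
# Pick the singular or plural template by count, then substitute %d;
# the same selection n__ performs on the frontend. Illustration only.
def n__(singular, plural, count)
  template = count == 1 ? singular : plural
  template.sub('%d', count.to_s)
end

n__('Code quality improved due to 1 resolved issue',
    'Code quality improved due to %d resolved issues', 2)
# => "Code quality improved due to 2 resolved issues"
```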

View File

@ -1,5 +1,5 @@
<script>
import { GlAlert } from '@gitlab/ui';
import { GlAlert, GlSkeletonLoader } from '@gitlab/ui';
import { i18n } from '../constants';
import workItemQuery from '../graphql/work_item.query.graphql';
import workItemTitleSubscription from '../graphql/work_item_title.subscription.graphql';
@ -11,6 +11,7 @@ export default {
i18n,
components: {
GlAlert,
GlSkeletonLoader,
WorkItemActions,
WorkItemTitle,
WorkItemState,
@ -80,23 +81,35 @@ export default {
{{ error }}
</gl-alert>
<div class="gl-display-flex">
<work-item-title
:loading="workItemLoading"
:work-item-id="workItem.id"
:work-item-title="workItem.title"
:work-item-type="workItemType"
class="gl-mr-5"
@error="error = $event"
/>
<work-item-actions
:work-item-id="workItem.id"
:can-delete="canDelete"
class="gl-ml-auto gl-mt-5"
@workItemDeleted="handleWorkItemDeleted"
@error="error = $event"
/>
<div v-if="workItemLoading" class="gl-max-w-26 gl-py-5">
<gl-skeleton-loader :height="65" :width="240">
<rect width="240" height="20" x="5" y="0" rx="4" />
<rect width="100" height="20" x="5" y="45" rx="4" />
</gl-skeleton-loader>
</div>
<work-item-state :loading="workItemLoading" :work-item="workItem" @error="error = $event" />
<template v-else>
<div class="gl-display-flex">
<work-item-title
:work-item-id="workItem.id"
:work-item-title="workItem.title"
:work-item-type="workItemType"
class="gl-mr-5"
@error="error = $event"
@updated="$emit('workItemUpdated')"
/>
<work-item-actions
:work-item-id="workItem.id"
:can-delete="canDelete"
class="gl-ml-auto gl-mt-5"
@workItemDeleted="handleWorkItemDeleted"
@error="error = $event"
/>
</div>
<work-item-state
:work-item="workItem"
@error="error = $event"
@updated="$emit('workItemUpdated')"
/>
</template>
</section>
</template>

View File

@ -1,5 +1,4 @@
<script>
import { GlLoadingIcon } from '@gitlab/ui';
import * as Sentry from '@sentry/browser';
import Tracking from '~/tracking';
import {
@ -14,16 +13,10 @@ import ItemState from './item_state.vue';
export default {
components: {
GlLoadingIcon,
ItemState,
},
mixins: [Tracking.mixin()],
props: {
loading: {
type: Boolean,
required: false,
default: false,
},
workItem: {
type: Object,
required: true,
@ -94,9 +87,8 @@ export default {
</script>
<template>
<gl-loading-icon v-if="loading" class="gl-mt-3" size="md" />
<item-state
v-else-if="workItem.state"
v-if="workItem.state"
:state="workItem.state"
:loading="updateInProgress"
@changed="updateWorkItemState"

View File

@ -1,5 +1,4 @@
<script>
import { GlLoadingIcon } from '@gitlab/ui';
import Tracking from '~/tracking';
import { i18n } from '../constants';
import updateWorkItemMutation from '../graphql/update_work_item.mutation.graphql';
@ -7,16 +6,10 @@ import ItemTitle from './item_title.vue';
export default {
components: {
GlLoadingIcon,
ItemTitle,
},
mixins: [Tracking.mixin()],
props: {
loading: {
type: Boolean,
required: false,
default: false,
},
workItemId: {
type: String,
required: false,
@ -68,6 +61,5 @@ export default {
</script>
<template>
<gl-loading-icon v-if="loading" class="gl-mt-3" size="md" />
<item-title v-else :title="workItemTitle" @title-changed="updateTitle" />
<item-title :title="workItemTitle" @title-changed="updateTitle" />
</template>

View File

@ -7,6 +7,8 @@ class Dashboard::GroupsController < Dashboard::ApplicationController
feature_category :subgroups
urgency :low, [:index]
def index
groups = GroupsFinder.new(current_user, all_available: false).execute
render_group_tree(groups)

View File

@ -18,7 +18,7 @@ class DashboardController < Dashboard::ApplicationController
feature_category :team_planning, [:issues, :issues_calendar]
feature_category :code_review, [:merge_requests]
urgency :low, [:merge_requests]
urgency :low, [:merge_requests, :activity]
def activity
respond_to do |format|

View File

@ -62,7 +62,7 @@ class GroupsController < Groups::ApplicationController
urgency :high, [:unfoldered_environment_names]
# TODO: Set #show to higher urgency after resolving https://gitlab.com/gitlab-org/gitlab/-/issues/334795
urgency :low, [:merge_requests, :show, :create, :new, :update, :projects, :destroy, :edit]
urgency :low, [:merge_requests, :show, :create, :new, :update, :projects, :destroy, :edit, :activity]
def index
redirect_to(current_user ? dashboard_groups_path : explore_groups_path)

View File

@ -20,60 +20,44 @@ class JiraConnect::ApplicationController < ApplicationController
end
def verify_qsh_claim!
payload, _ = decode_auth_token!
return if request.format.json? && payload['qsh'] == 'context-qsh'
return if request.format.json? && jwt.verify_context_qsh_claim
# Make sure `qsh` claim matches the current request
render_403 unless payload['qsh'] == Atlassian::Jwt.create_query_string_hash(request.url, request.method, jira_connect_base_url)
rescue StandardError
render_403
render_403 unless jwt.verify_qsh_claim(request.url, request.method, jira_connect_base_url)
end
def atlassian_jwt_valid?
return false unless installation_from_jwt
# Verify JWT signature with our stored `shared_secret`
decode_auth_token!
rescue JWT::DecodeError
false
jwt.valid?(installation_from_jwt.shared_secret)
end
def installation_from_jwt
strong_memoize(:installation_from_jwt) do
next unless claims['iss']
next unless jwt.iss_claim
JiraConnectInstallation.find_by_client_key(claims['iss'])
end
end
def claims
strong_memoize(:claims) do
next {} unless auth_token
# Decode without verification to get `client_key` in `iss`
payload, _ = Atlassian::Jwt.decode(auth_token, nil, false)
payload
JiraConnectInstallation.find_by_client_key(jwt.iss_claim)
end
end
def jira_user
strong_memoize(:jira_user) do
next unless installation_from_jwt
next unless claims['sub']
next unless jwt.sub_claim
# This only works for Jira Cloud installations.
installation_from_jwt.client.user_info(claims['sub'])
installation_from_jwt.client.user_info(jwt.sub_claim)
end
end
def decode_auth_token!
Atlassian::Jwt.decode(auth_token, installation_from_jwt.shared_secret)
def jwt
strong_memoize(:jwt) do
Atlassian::JiraConnect::Jwt::Symmetric.new(auth_token)
end
end
def auth_token
strong_memoize(:auth_token) do
params[:jwt] || request.headers['Authorization']&.split(' ', 2)&.last
end
params[:jwt] || request.headers['Authorization']&.split(' ', 2)&.last
end
end
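The controller now delegates decoding and claim checks to a single memoized wrapper. A hedged usage sketch (the wrapper class is added later in this commit; `installation` and the request values are assumed to be in scope):

```ruby
# How the refactored controller consumes the wrapper; method names come
# from this commit, surrounding objects are assumed to be in scope.
jwt = Atlassian::JiraConnect::Jwt::Symmetric.new(auth_token)

jwt.iss_claim                           # client key, read without verification
jwt.valid?(installation.shared_secret)  # verifies the JWT signature
jwt.verify_context_qsh_claim            # true when the qsh claim is 'context-qsh'
jwt.verify_qsh_claim(request.url, request.method, jira_connect_base_url)
```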

View File

@ -2,7 +2,7 @@
class Profiles::KeysController < Profiles::ApplicationController
feature_category :users
urgency :low, [:create]
urgency :low, [:create, :index]
def index
@keys = current_user.keys.order_id_desc

View File

@ -4,6 +4,8 @@ class Profiles::PreferencesController < Profiles::ApplicationController
before_action :user
feature_category :users
urgency :low, [:show]
urgency :medium, [:update]
def show

View File

@ -15,7 +15,7 @@ class ProfilesController < Profiles::ApplicationController
end
feature_category :users
urgency :low, [:update]
urgency :low, [:show, :update]
def show
end

View File

@ -9,6 +9,8 @@ class Projects::AvatarsController < Projects::ApplicationController
feature_category :projects
urgency :low, [:show]
def show
@blob = @repository.blob_at_branch(@repository.root_ref, @project.avatar_in_git)

View File

@ -5,6 +5,8 @@ class Projects::StarrersController < Projects::ApplicationController
feature_category :projects
urgency :low, [:index]
def index
@starrers = UsersStarProjectsFinder.new(@project, params, current_user: @current_user).execute
@sort = params[:sort].presence || sort_value_name

View File

@ -59,8 +59,10 @@ class ProjectsController < Projects::ApplicationController
urgency :low, [:export, :remove_export, :generate_new_export, :download_export]
# TODO: Set high urgency for #show https://gitlab.com/gitlab-org/gitlab/-/issues/334444
urgency :low, [:refs, :show, :toggle_star, :transfer, :archive, :destroy, :update, :activity,
:edit, :new, :export, :remove_export, :generate_new_export, :download_export]
urgency :low, [:refs, :show, :toggle_star, :transfer, :archive, :destroy, :update, :create,
:activity, :edit, :new, :export, :remove_export, :generate_new_export, :download_export]
urgency :high, [:unfoldered_environment_names]
def index

View File

@ -34,8 +34,8 @@ class UsersController < ApplicationController
feature_category :snippets, [:snippets]
# TODO: Set higher urgency after resolving https://gitlab.com/gitlab-org/gitlab/-/issues/357914
urgency :low, [:show, :calendar_activities, :activity, :projects, :groups]
urgency :medium, [:calendar]
urgency :low, [:show, :calendar_activities, :contributed, :activity, :projects, :groups]
urgency :default, [:calendar, :followers, :following, :starred]
urgency :high, [:exists]
def show
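Several controllers in this commit gain or extend these annotations. A short sketch of how the class-level DSL reads, assuming GitLab's endpoint-urgency conventions (each call attaches a latency target to a list of actions; names below are illustrative):

```ruby
# Illustrative controller showing the annotations used throughout this
# commit: feature_category tags ownership, urgency sets the latency target.
class ExampleController < ApplicationController
  feature_category :users, [:show, :exists]

  urgency :low,  [:show]    # relaxed target for heavier pages
  urgency :high, [:exists]  # strict target for cheap lookups

  def show; end

  def exists; end
end
```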

View File

@ -1,10 +1,6 @@
# frozen_string_literal: true
module ContainerRegistryHelper
def container_registry_expiration_policies_throttling?
Feature.enabled?(:container_registry_expiration_policies_throttling, default_enabled: :yaml)
end
def container_repository_gid_prefix
"gid://#{GlobalID.app}/#{ContainerRepository.name}/"
end

View File

@ -461,10 +461,6 @@ class ContainerRepository < ApplicationRecord
client.delete_repository_tag_by_name(self.path, name)
end
def reset_expiration_policy_started_at!
update!(expiration_policy_started_at: nil)
end
def start_expiration_policy!
update!(expiration_policy_started_at: Time.zone.now)
end

View File

@ -1,18 +0,0 @@
# frozen_string_literal: true
class ContainerExpirationPolicyService < BaseService
InvalidPolicyError = Class.new(StandardError)
def execute(container_expiration_policy)
container_expiration_policy.schedule_next_run!
container_expiration_policy.container_repositories.find_each do |container_repository|
CleanupContainerRepositoryWorker.perform_async(
nil,
container_repository.id,
container_expiration_policy.policy_params
.merge(container_expiration_policy: true)
)
end
end
end

View File

@ -117,7 +117,6 @@ module Projects
@counts[:before_truncate_size] = @tags.size
@counts[:after_truncate_size] = @tags.size
return unless throttling_enabled?
return if max_list_size == 0
# truncate the list to make sure that after the #filter_keep_n
@ -151,10 +150,6 @@ module Projects
!!result
end
def throttling_enabled?
Feature.enabled?(:container_registry_expiration_policies_throttling, default_enabled: :yaml)
end
def max_list_size
::Gitlab::CurrentSettings.current_application_settings.container_registry_cleanup_tags_service_max_list_size.to_i
end

View File

@ -46,18 +46,11 @@ module Projects
end
def timeout?(start_time)
return false unless throttling_enabled?
return false if service_timeout.in?(DISABLED_TIMEOUTS)
(Time.zone.now - start_time) > service_timeout
end
def throttling_enabled?
strong_memoize(:feature_flag) do
Feature.enabled?(:container_registry_expiration_policies_throttling, default_enabled: :yaml)
end
end
def service_timeout
::Gitlab::CurrentSettings.current_application_settings.container_registry_delete_tags_service_timeout
end

View File

@ -13,7 +13,6 @@
= f.gitlab_ui_checkbox_component :container_expiration_policies_enable_historic_entries,
'%{label} %{label_link}'.html_safe % { label: label, label_link: label_link },
help_text: '%{help_text} %{help_link}'.html_safe % { help_text: help_text, help_link: help_link }
- if container_registry_expiration_policies_throttling?
.form-group
= f.label :container_registry_delete_tags_service_timeout, _('Cleanup policy maximum processing time (seconds)'), class: 'label-bold'
= f.number_field :container_registry_delete_tags_service_timeout, min: 0, class: 'form-control gl-form-input'

View File

@ -23,31 +23,17 @@ class CleanupContainerRepositoryWorker
return unless valid?
if run_by_container_expiration_policy?
container_repository.start_expiration_policy!
end
result = Projects::ContainerRepository::CleanupTagsService
Projects::ContainerRepository::CleanupTagsService
.new(container_repository, current_user, params)
.execute
if run_by_container_expiration_policy? && result[:status] == :success
container_repository.reset_expiration_policy_started_at!
end
end
private
def valid?
return true if run_by_container_expiration_policy?
current_user && container_repository && project
end
def run_by_container_expiration_policy?
@params['container_expiration_policy'] && container_repository.present? && project.present?
end
def project
container_repository&.project
end

View File

@ -27,7 +27,6 @@ module ContainerExpirationPolicies
].freeze
def perform_work
return unless throttling_enabled?
return unless container_repository
log_extra_metadata_on_done(:container_repository_id, container_repository.id)
@ -45,8 +44,6 @@ module ContainerExpirationPolicies
end
def max_running_jobs
return 0 unless throttling_enabled?
::Gitlab::CurrentSettings.container_registry_expiration_policies_worker_capacity
end
@ -122,10 +119,6 @@ module ContainerExpirationPolicies
policy.next_run_at < now || (now + max_cleanup_execution_time.seconds < policy.next_run_at)
end
def throttling_enabled?
Feature.enabled?(:container_registry_expiration_policies_throttling, default_enabled: :yaml)
end
def max_cleanup_execution_time
::Gitlab::CurrentSettings.container_registry_delete_tags_service_timeout
end

View File

@ -5,7 +5,11 @@ class ContainerExpirationPolicyWorker # rubocop:disable Scalability/IdempotentWo
data_consistency :always
# rubocop:disable Scalability/CronWorkerContext
# This worker does not perform work scoped to a context
include CronjobQueue
# rubocop:enable Scalability/CronWorkerContext
include ExclusiveLeaseGuard
feature_category :container_registry
@ -17,7 +21,9 @@ class ContainerExpirationPolicyWorker # rubocop:disable Scalability/IdempotentWo
def perform
process_stale_ongoing_cleanups
disable_policies_without_container_repositories
throttling_enabled? ? perform_throttled : perform_unthrottled
try_obtain_lease do
ContainerExpirationPolicies::CleanupContainerRepositoryWorker.perform_with_capacity
end
log_counts
end
@ -54,54 +60,6 @@ class ContainerExpirationPolicyWorker # rubocop:disable Scalability/IdempotentWo
.update_all(expiration_policy_cleanup_status: :cleanup_unfinished)
end
def perform_unthrottled
with_runnable_policy(preloaded: true) do |policy|
with_context(project: policy.project,
user: nil) do |project:, user:|
ContainerExpirationPolicyService.new(project, user)
.execute(policy)
end
end
end
def perform_throttled
try_obtain_lease do
ContainerExpirationPolicies::CleanupContainerRepositoryWorker.perform_with_capacity
end
end
# TODO : remove the preload option when cleaning FF container_registry_expiration_policies_throttling
def with_runnable_policy(preloaded: false)
ContainerExpirationPolicy.runnable_schedules.each_batch(of: BATCH_SIZE) do |policies|
# rubocop: disable CodeReuse/ActiveRecord
cte = Gitlab::SQL::CTE.new(:batched_policies, policies.limit(BATCH_SIZE))
# rubocop: enable CodeReuse/ActiveRecord
scope = cte.apply_to(ContainerExpirationPolicy.all).with_container_repositories
scope = scope.preloaded if preloaded
scope.each do |policy|
if policy.valid?
yield policy
else
disable_invalid_policy!(policy)
end
end
end
end
def disable_invalid_policy!(policy)
policy.disable!
Gitlab::ErrorTracking.log_exception(
::ContainerExpirationPolicyWorker::InvalidPolicyError.new,
container_expiration_policy_id: policy.id
)
end
def throttling_enabled?
Feature.enabled?(:container_registry_expiration_policies_throttling, default_enabled: :yaml)
end
def lease_timeout
5.hours
end

View File

@ -24,8 +24,6 @@
- code_review
- code_testing
- compliance_management
- container_host_security
- container_network_security
- container_registry
- container_scanning
- continuous_delivery
@ -55,6 +53,7 @@
- experimentation_expansion
- feature_flags
- five_minute_production_app
- fulfillment_platform
- fuzz_testing
- geo_replication
- git_lfs

View File

@ -1,8 +0,0 @@
---
name: container_registry_expiration_policies_throttling
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/36319
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/238190
milestone: '13.4'
type: development
group: group::package
default_enabled: true

View File

@ -118,8 +118,7 @@ with [domain expertise](#domain-experts).
1. If your merge request includes frontend changes (*1*), it must be
**approved by a [frontend maintainer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_maintainers_frontend)**.
1. If your merge request includes user-facing changes (*3*), it must be
**approved by a [Product Designer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_reviewers_UX)**,
based on assignments in the appropriate [DevOps stage group](https://about.gitlab.com/handbook/product/categories/#devops-stages).
**approved by a [Product Designer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_reviewers_UX)**.
See the [design and user interface guidelines](contributing/design.md) for details.
1. If your merge request includes adding a new JavaScript library (*1*)...
- If the library significantly increases the

View File

@ -158,12 +158,8 @@ Here are examples of regex patterns you may want to use:
### Set cleanup limits to conserve resources
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/288812) in GitLab 13.9 [with a flag](../../../administration/feature_flags.md) named `container_registry_expiration_policies_throttling`. Disabled by default.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80815) in GitLab 14.9.
FLAG:
By default this feature is available in GitLab 14.9. To disable the feature, an administrator can
[disable the feature flag](../../../administration/feature_flags.md)
named `container_registry_expiration_policies_throttling`.
> - [Enabled by default](https://gitlab.com/groups/gitlab-org/-/epics/2270) in GitLab 14.9.
> - [Removed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/84996) the feature flag `container_registry_expiration_policies_throttling` in GitLab 15.0.
Cleanup policies are executed as a background process. This process is complex and, depending on the number of tags to delete,
can take time to finish.
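The limits themselves are application settings and can be inspected from the Rails console. A hedged example using the setting names referenced elsewhere in this commit:

```ruby
# Reading the cleanup limits from the Rails console; these setting names
# match the application settings used in this commit.
settings = Gitlab::CurrentSettings.current_application_settings

settings.container_registry_delete_tags_service_timeout          # seconds; 0 disables the timeout
settings.container_registry_expiration_policies_worker_capacity  # max concurrent cleanup jobs
settings.container_registry_cleanup_tags_service_max_list_size   # tags considered per run
```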

View File

@ -274,3 +274,31 @@ In some cases, pull mirroring does not transfer LFS files. This issue occurs whe
[Fixed](https://gitlab.com/gitlab-org/gitlab/-/issues/335123) in GitLab 14.0.6.
- You mirror an external repository using object storage.
An issue exists [to fix this problem](https://gitlab.com/gitlab-org/gitlab/-/issues/335495).
### `The repository is being updated`, but mirroring neither fails nor succeeds visibly
In rare cases, mirroring slots on Redis can become exhausted,
possibly because Sidekiq workers are reaped due to out-of-memory (OoM) events.
When this occurs, mirroring jobs start and complete quickly, but they neither
fail nor succeed. They also do not leave a clear log. To check for this problem:
1. Enter the [Rails console](../../../../administration/operations/rails_console.md)
and check Redis' mirroring capacity:
```ruby
current = Gitlab::Redis::SharedState.with { |redis| redis.scard('MIRROR_PULL_CAPACITY') }.to_i
maximum = Gitlab::CurrentSettings.mirror_max_capacity
available = maximum - current
```
1. If the mirroring capacity is `0` or very low, you can drain all stuck jobs with:
```ruby
Gitlab::Redis::SharedState.with { |redis| redis.smembers('MIRROR_PULL_CAPACITY') }.each do |pid|
Gitlab::Redis::SharedState.with { |redis| redis.srem('MIRROR_PULL_CAPACITY', pid) }
end
```
1. After you run the command, the [background jobs page](../../../admin_area/index.md#background-jobs)
should show new mirroring jobs being scheduled, especially when
[triggered manually](#update-a-mirror).

View File

@ -115,6 +115,21 @@ and mirroring attempts stop. This failure is visible in either the:
To resume project mirroring, [force an update](index.md#force-an-update).
If many projects are affected by this problem, such as after a long network or
server outage, you can use the [Rails console](../../../../administration/operations/rails_console.md)
to identify and update all affected projects with this command:
```ruby
Project.find_each do |p|
if p.import_state && p.import_state.retry_count >= 14
puts "Resetting mirroring operation for #{p.full_path}"
p.import_state.reset_retry_count
p.import_state.set_next_execution_to_now(prioritized: true)
p.import_state.save!
end
end
```
## Related topics
- Configure [pull mirroring intervals](../../../../administration/instance_limits.md#pull-mirroring-interval)

View File

@ -52,7 +52,7 @@ module API
params do
requires :id, type: String, desc: "Namespace's ID or path"
end
get ':id', requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS, feature_category: :subgroups do
get ':id', requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS, feature_category: :subgroups, urgency: :low do
user_namespace = find_namespace!(params[:id])
present user_namespace, with: Entities::Namespace, current_user: current_user

View File

@ -91,7 +91,7 @@ module API
# rubocop:disable CodeReuse/Worker
CleanupContainerRepositoryWorker.perform_async(current_user.id, repository.id,
declared_params.except(:repository_id).merge(container_expiration_policy: false))
declared_params.except(:repository_id))
# rubocop:enable CodeReuse/Worker
track_package_event('delete_tag_bulk', :container, user: current_user, project: user_project, namespace: user_project.namespace)

View File

@ -0,0 +1,55 @@
# frozen_string_literal: true
module Atlassian
module JiraConnect
module Jwt
class Symmetric
include Gitlab::Utils::StrongMemoize
CONTEXT_QSH_STRING = 'context-qsh'
def initialize(jwt)
@jwt = jwt
end
def iss_claim
jwt_headers['iss']
end
def sub_claim
jwt_headers['sub']
end
def valid?(shared_secret)
Atlassian::Jwt.decode(@jwt, shared_secret).present?
rescue JWT::DecodeError
false
end
def verify_qsh_claim(url_with_query, method, url)
qsh_claim == Atlassian::Jwt.create_query_string_hash(url_with_query, method, url)
rescue StandardError
false
end
def verify_context_qsh_claim
qsh_claim == CONTEXT_QSH_STRING
end
private
def qsh_claim
jwt_headers['qsh']
end
def jwt_headers
strong_memoize(:jwt_headers) do
Atlassian::Jwt.decode(@jwt, nil, false).first
rescue JWT::DecodeError
{}
end
end
end
end
end
end

View File

@ -13,7 +13,7 @@ stages:
a11y:
stage: accessibility
image: registry.gitlab.com/gitlab-org/ci-cd/accessibility:6.1.1
image: registry.gitlab.com/gitlab-org/ci-cd/accessibility:6.2.3
script:
- /gitlab-accessibility.sh "$a11y_urls"
allow_failure: true

View File

@ -44342,11 +44342,15 @@ msgstr ""
msgid "ciReport|Code Quality test metrics results are being parsed"
msgstr ""
msgid "ciReport|Code quality degraded"
msgstr ""
msgid "ciReport|Code quality degraded due to 1 new issue"
msgid_plural "ciReport|Code quality degraded due to %d new issues"
msgstr[0] ""
msgstr[1] ""
msgid "ciReport|Code quality improved"
msgstr ""
msgid "ciReport|Code quality improved due to 1 resolved issue"
msgid_plural "ciReport|Code quality improved due to %d resolved issues"
msgstr[0] ""
msgstr[1] ""
msgid "ciReport|Code quality scanning detected %{issueCount} changes in merged results"
msgstr ""

View File

@ -396,7 +396,6 @@ RSpec.describe 'Admin updates settings' do
end
context 'Container Registry' do
let(:feature_flag_enabled) { true }
let(:client_support) { true }
let(:settings_titles) do
{
@ -409,18 +408,9 @@ RSpec.describe 'Admin updates settings' do
before do
stub_container_registry_config(enabled: true)
stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
allow(ContainerRegistry::Client).to receive(:supports_tag_delete?).and_return(client_support)
end
shared_examples 'not having container registry setting' do |registry_setting|
it "lacks the container setting #{registry_setting}" do
visit ci_cd_admin_application_settings_path
expect(page).not_to have_content(settings_titles[registry_setting])
end
end
%i[container_registry_delete_tags_service_timeout container_registry_expiration_policies_worker_capacity container_registry_cleanup_tags_service_max_list_size].each do |setting|
context "for container registry setting #{setting}" do
it 'changes the setting' do
@ -434,12 +424,6 @@ RSpec.describe 'Admin updates settings' do
expect(current_settings.public_send(setting)).to eq(400)
expect(page).to have_content "Application settings saved successfully"
end
context 'with feature flag disabled' do
let(:feature_flag_enabled) { false }
it_behaves_like 'not having container registry setting', setting
end
end
end
@ -457,12 +441,6 @@ RSpec.describe 'Admin updates settings' do
expect(current_settings.container_registry_expiration_policies_caching).to eq(!old_value)
expect(page).to have_content "Application settings saved successfully"
end
context 'with feature flag disabled' do
let(:feature_flag_enabled) { false }
it_behaves_like 'not having container registry setting', :container_registry_expiration_policies_caching
end
end
end
end

View File

@ -61,8 +61,8 @@ describe('Codequality reports store getters', () => {
it.each`
resolvedIssues | newIssues | expectedText
${0} | ${0} | ${'No changes to code quality'}
${0} | ${1} | ${'Code quality degraded'}
${2} | ${0} | ${'Code quality improved'}
${0} | ${1} | ${'Code quality degraded due to 1 new issue'}
${2} | ${0} | ${'Code quality improved due to 2 resolved issues'}
${1} | ${2} | ${'Code quality scanning detected 3 changes in merged results'}
`(
'returns a summary containing $resolvedIssues resolved issues and $newIssues new issues',

View File

@ -1,4 +1,3 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@ -24,19 +23,16 @@ describe('WorkItemState component', () => {
const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findItemState = () => wrapper.findComponent(ItemState);
const createComponent = ({
state = STATE_OPEN,
loading = false,
mutationHandler = mutationSuccessHandler,
} = {}) => {
const { id, workItemType } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemState, {
apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
propsData: {
loading,
workItem: {
id,
state,
@ -50,32 +46,10 @@ describe('WorkItemState component', () => {
wrapper.destroy();
});
describe('when loading', () => {
beforeEach(() => {
createComponent({ loading: true });
});
it('renders state', () => {
createComponent();
it('renders loading spinner', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
it('does not render state', () => {
expect(findItemState().exists()).toBe(false);
});
});
describe('when loaded', () => {
beforeEach(() => {
createComponent({ loading: false });
});
it('does not render loading spinner', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
it('renders state', () => {
expect(findItemState().props('state')).toBe(workItemQueryResponse.data.workItem.state);
});
expect(findItemState().props('state')).toBe(workItemQueryResponse.data.workItem.state);
});
describe('when updating the state', () => {

View File

@ -1,4 +1,3 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@ -18,15 +17,13 @@ describe('WorkItemTitle component', () => {
const mutationSuccessHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findItemTitle = () => wrapper.findComponent(ItemTitle);
const createComponent = ({ loading = false, mutationHandler = mutationSuccessHandler } = {}) => {
const createComponent = ({ mutationHandler = mutationSuccessHandler } = {}) => {
const { id, title, workItemType } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemTitle, {
apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
propsData: {
loading,
workItemId: id,
workItemTitle: title,
workItemType: workItemType.name,
@ -38,32 +35,10 @@ describe('WorkItemTitle component', () => {
wrapper.destroy();
});
describe('when loading', () => {
beforeEach(() => {
createComponent({ loading: true });
});
it('renders title', () => {
createComponent();
it('renders loading spinner', () => {
expect(findLoadingIcon().exists()).toBe(true);
});
it('does not render title', () => {
expect(findItemTitle().exists()).toBe(false);
});
});
describe('when loaded', () => {
beforeEach(() => {
createComponent({ loading: false });
});
it('does not render loading spinner', () => {
expect(findLoadingIcon().exists()).toBe(false);
});
it('renders title', () => {
expect(findItemTitle().props('title')).toBe(workItemQueryResponse.data.workItem.title);
});
expect(findItemTitle().props('title')).toBe(workItemQueryResponse.data.workItem.title);
});
describe('when updating the title', () => {

View File

@ -1,10 +1,11 @@
import { GlAlert } from '@gitlab/ui';
import { GlAlert, GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
import WorkItemState from '~/work_items/components/work_item_state.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
import { i18n } from '~/work_items/constants';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
@ -20,7 +21,9 @@ describe('WorkItemDetail component', () => {
const initialSubscriptionHandler = jest.fn().mockResolvedValue(workItemTitleSubscriptionResponse);
const findAlert = () => wrapper.findComponent(GlAlert);
const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader);
const findWorkItemTitle = () => wrapper.findComponent(WorkItemTitle);
const findWorkItemState = () => wrapper.findComponent(WorkItemState);
const createComponent = ({
workItemId = workItemQueryResponse.data.workItem.id,
@ -55,8 +58,10 @@ describe('WorkItemDetail component', () => {
createComponent();
});
it('renders WorkItemTitle in loading state', () => {
expect(findWorkItemTitle().props('loading')).toBe(true);
it('renders skeleton loader', () => {
expect(findSkeleton().exists()).toBe(true);
expect(findWorkItemState().exists()).toBe(false);
expect(findWorkItemTitle().exists()).toBe(false);
});
});
@ -66,8 +71,10 @@ describe('WorkItemDetail component', () => {
return waitForPromises();
});
it('does not render WorkItemTitle in loading state', () => {
expect(findWorkItemTitle().props('loading')).toBe(false);
it('does not render skeleton', () => {
expect(findSkeleton().exists()).toBe(false);
expect(findWorkItemState().exists()).toBe(true);
expect(findWorkItemTitle().exists()).toBe(true);
});
});
@ -82,6 +89,7 @@ describe('WorkItemDetail component', () => {
it('shows an error message when WorkItemTitle emits an `error` event', async () => {
createComponent();
await waitForPromises();
findWorkItemTitle().vm.$emit('error', i18n.updateError);
await waitForPromises();

View File

@ -3,17 +3,9 @@
require 'spec_helper'
RSpec.describe ContainerRegistryHelper do
describe '#container_registry_expiration_policies_throttling?' do
subject { helper.container_registry_expiration_policies_throttling? }
describe '#container_repository_gid_prefix' do
subject { helper.container_repository_gid_prefix }
it { is_expected.to eq(true) }
context 'with container_registry_expiration_policies_throttling disabled' do
before do
stub_feature_flags(container_registry_expiration_policies_throttling: false)
end
it { is_expected.to eq(false) }
end
it { is_expected.to eq('gid://gitlab/ContainerRepository/') }
end
end

View File

@ -0,0 +1,97 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Atlassian::JiraConnect::Jwt::Symmetric do
let(:shared_secret) { 'secret' }
describe '#iss_claim' do
let(:jwt) { Atlassian::Jwt.encode({ iss: '123' }, shared_secret) }
subject { described_class.new(jwt).iss_claim }
it { is_expected.to eq('123') }
context 'invalid JWT' do
let(:jwt) { '123' }
it { is_expected.to eq(nil) }
end
end
describe '#sub_claim' do
let(:jwt) { Atlassian::Jwt.encode({ sub: '123' }, shared_secret) }
subject { described_class.new(jwt).sub_claim }
it { is_expected.to eq('123') }
context 'invalid JWT' do
let(:jwt) { '123' }
it { is_expected.to eq(nil) }
end
end
describe '#valid?' do
subject { described_class.new(jwt).valid?(shared_secret) }
context 'invalid JWT' do
let(:jwt) { '123' }
it { is_expected.to eq(false) }
end
context 'valid JWT' do
let(:jwt) { Atlassian::Jwt.encode({}, shared_secret) }
it { is_expected.to eq(true) }
end
end
describe '#verify_qsh_claim' do
let(:jwt) { Atlassian::Jwt.encode({ qsh: qsh_claim }, shared_secret) }
let(:qsh_claim) do
Atlassian::Jwt.create_query_string_hash('https://gitlab.test/subscriptions', 'GET', 'https://gitlab.test')
end
subject(:verify_qsh_claim) do
described_class.new(jwt).verify_qsh_claim('https://gitlab.test/subscriptions', 'GET', 'https://gitlab.test')
end
it { is_expected.to eq(true) }
context 'qsh does not match' do
let(:qsh_claim) do
Atlassian::Jwt.create_query_string_hash('https://example.com/foo', 'POST', 'https://example.com')
end
it { is_expected.to eq(false) }
end
context 'creating query string hash raises an error' do
let(:qsh_claim) { '123' }
specify do
expect(Atlassian::Jwt).to receive(:create_query_string_hash).and_raise(StandardError)
expect(verify_qsh_claim).to eq(false)
end
end
end
describe '#verify_context_qsh_claim' do
let(:jwt) { Atlassian::Jwt.encode({ qsh: qsh_claim }, shared_secret) }
let(:qsh_claim) { 'context-qsh' }
subject(:verify_context_qsh_claim) { described_class.new(jwt).verify_context_qsh_claim }
it { is_expected.to eq(true) }
context 'jwt does not contain a context qsh' do
let(:qsh_claim) { '123' }
it { is_expected.to eq(false) }
end
end
end

View File

@ -691,22 +691,6 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
describe '#reset_expiration_policy_started_at!' do
subject { repository.reset_expiration_policy_started_at! }
before do
repository.start_expiration_policy!
end
it 'resets the expiration policy started at' do
started_at = repository.expiration_policy_started_at
expect(started_at).not_to be_nil
expect { subject }
.to change { repository.expiration_policy_started_at }.from(started_at).to(nil)
end
end
context 'registry migration' do
before do
allow(repository.gitlab_api_client).to receive(:supports_gitlab_api?).and_return(true)

View File

@ -250,8 +250,7 @@ RSpec.describe API::ProjectContainerRepositories do
name_regex_delete: 'v10.*',
name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
container_expiration_policy: false }
older_than: '1 day' }
end
let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
@ -297,8 +296,7 @@ RSpec.describe API::ProjectContainerRepositories do
name_regex_delete: nil,
name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
container_expiration_policy: false }
older_than: '1 day' }
end
it 'schedules cleanup of tags repository' do

View File

@ -1,32 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ContainerExpirationPolicyService do
let_it_be(:user) { create(:user) }
let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
let(:project) { container_expiration_policy.project }
let(:container_repository) { create(:container_repository, project: project) }
before do
project.add_maintainer(user)
end
describe '#execute' do
subject { described_class.new(project, user).execute(container_expiration_policy) }
it 'kicks off a cleanup worker for the container repository' do
expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
.with(nil, container_repository.id, hash_including(container_expiration_policy: true))
subject
end
it 'sets next_run_at on the container_expiration_policy' do
subject
expect(container_expiration_policy.next_run_at).to be > Time.zone.now
end
end
end

View File

@ -34,8 +34,6 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
stub_digest_config('sha256:configB', 5.days.ago)
stub_digest_config('sha256:configC', 1.month.ago)
stub_digest_config('sha256:configD', nil)
stub_feature_flags(container_registry_expiration_policies_throttling: false)
end
describe '#execute' do
@ -334,24 +332,17 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService, :clean_gitlab_
end
end
where(:feature_flag_enabled, :max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
false | 10 | :success | :success | false
false | 10 | :error | :error | false
false | 3 | :success | :success | false
false | 3 | :error | :error | false
false | 0 | :success | :success | false
false | 0 | :error | :error | false
true | 10 | :success | :success | false
true | 10 | :error | :error | false
true | 3 | :success | :error | true
true | 3 | :error | :error | true
true | 0 | :success | :success | false
true | 0 | :error | :error | false
where(:max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
10 | :success | :success | false
10 | :error | :error | false
3 | :success | :error | true
3 | :error | :error | true
0 | :success | :success | false
0 | :error | :error | false
end
with_them do
before do
stub_feature_flags(container_registry_expiration_policies_throttling: feature_flag_enabled)
stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
expect(service).to receive(:execute).and_return(status: delete_tags_service_status)

View File

@ -134,10 +134,6 @@ RSpec.describe Projects::ContainerRepository::DeleteTagsService do
subject { service.execute(repository) }
before do
stub_feature_flags(container_registry_expiration_policies_throttling: false)
end
context 'without permissions' do
it { is_expected.to include(status: :error) }
end

View File

@ -12,10 +12,6 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService do
subject { service.execute }
before do
stub_feature_flags(container_registry_expiration_policies_throttling: false)
end
RSpec.shared_examples 'deleting tags' do
it 'deletes the tags by name' do
stub_delete_reference_requests(tags)
@ -26,6 +22,8 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService do
end
context 'with tags to delete' do
let(:timeout) { 10 }
it_behaves_like 'deleting tags'
it 'succeeds when tag delete returns 404' do
@ -50,60 +48,53 @@ RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService do
end
end
context 'with throttling enabled' do
let(:timeout) { 10 }
before do
stub_application_setting(container_registry_delete_tags_service_timeout: timeout)
end
before do
stub_feature_flags(container_registry_expiration_policies_throttling: true)
stub_application_setting(container_registry_delete_tags_service_timeout: timeout)
end
it_behaves_like 'deleting tags'
context 'with timeout' do
context 'set to a valid value' do
before do
allow(Time.zone).to receive(:now).and_return(10, 15, 25) # the third call to Time.zone.now triggers the timeout
stub_delete_reference_requests('A' => 200)
end
it { is_expected.to eq(status: :error, message: 'error while deleting tags', deleted: ['A'], exception_class_name: Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError.name) }
it 'tracks the exception' do
expect(::Gitlab::ErrorTracking)
.to receive(:track_exception).with(::Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError, tags_count: tags.size, container_repository_id: repository.id)
subject
end
end
context 'set to 0' do
let(:timeout) { 0 }
it_behaves_like 'deleting tags'
end
context 'set to nil' do
let(:timeout) { nil }
it_behaves_like 'deleting tags'
end
end
context 'with a network error' do
context 'with timeout' do
context 'set to a valid value' do
before do
expect(service).to receive(:delete_tags).and_raise(::Faraday::TimeoutError)
allow(Time.zone).to receive(:now).and_return(10, 15, 25) # the third call to Time.zone.now triggers the timeout
stub_delete_reference_requests('A' => 200)
end
it { is_expected.to eq(status: :error, message: 'error while deleting tags', deleted: [], exception_class_name: ::Faraday::TimeoutError.name) }
it { is_expected.to eq(status: :error, message: 'error while deleting tags', deleted: ['A'], exception_class_name: Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError.name) }
it 'tracks the exception' do
expect(::Gitlab::ErrorTracking)
.to receive(:track_exception).with(::Faraday::TimeoutError, tags_count: tags.size, container_repository_id: repository.id)
.to receive(:track_exception).with(::Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError, tags_count: tags.size, container_repository_id: repository.id)
subject
end
end
context 'set to 0' do
let(:timeout) { 0 }
it_behaves_like 'deleting tags'
end
context 'set to nil' do
let(:timeout) { nil }
it_behaves_like 'deleting tags'
end
end
context 'with a network error' do
before do
expect(service).to receive(:delete_tags).and_raise(::Faraday::TimeoutError)
end
it { is_expected.to eq(status: :error, message: 'error while deleting tags', deleted: [], exception_class_name: ::Faraday::TimeoutError.name) }
it 'tracks the exception' do
expect(::Gitlab::ErrorTracking)
.to receive(:track_exception).with(::Faraday::TimeoutError, tags_count: tags.size, container_repository_id: repository.id)
subject
end
end
end

View File

@ -13,11 +13,11 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
let(:service) { instance_double(Projects::ContainerRepository::CleanupTagsService) }
context 'bulk delete api' do
let(:params) { { key: 'value', 'container_expiration_policy' => false } }
let(:params) { { key: 'value' } }
it 'executes the destroy service' do
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
.with(repository, user, params.merge('container_expiration_policy' => false))
.with(repository, user, params)
.and_return(service)
expect(service).to receive(:execute)
@ -36,40 +36,5 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
end.not_to raise_error
end
end
context 'container expiration policy' do
let(:params) { { key: 'value', 'container_expiration_policy' => true } }
before do
allow(ContainerRepository)
.to receive(:find_by_id).with(repository.id).and_return(repository)
end
it 'executes the destroy service' do
expect(repository).to receive(:start_expiration_policy!).and_call_original
expect(repository).to receive(:reset_expiration_policy_started_at!).and_call_original
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
.with(repository, nil, params.merge('container_expiration_policy' => true))
.and_return(service)
expect(service).to receive(:execute).and_return(status: :success)
subject.perform(nil, repository.id, params)
expect(repository.reload.expiration_policy_started_at).to be_nil
end
it "doesn't reset the expiration policy started at if the destroy service returns an error" do
expect(repository).to receive(:start_expiration_policy!).and_call_original
expect(repository).not_to receive(:reset_expiration_policy_started_at!)
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
.with(repository, nil, params.merge('container_expiration_policy' => true))
.and_return(service)
expect(service).to receive(:execute).and_return(status: :error, message: 'timeout while deleting tags')
subject.perform(nil, repository.id, params)
expect(repository.reload.expiration_policy_started_at).not_to be_nil
end
end
end
end

View File

@ -524,13 +524,5 @@ RSpec.describe ContainerExpirationPolicies::CleanupContainerRepositoryWorker do
end
it { is_expected.to eq(capacity) }
context 'with feature flag disabled' do
before do
stub_feature_flags(container_registry_expiration_policies_throttling: false)
end
it { is_expected.to eq(0) }
end
end
end

View File

@ -11,15 +11,13 @@ RSpec.describe ContainerExpirationPolicyWorker do
describe '#perform' do
subject { worker.perform }
shared_examples 'not executing any policy' do
it 'does not run any policy' do
expect(ContainerExpirationPolicyService).not_to receive(:new)
context 'process cleanups' do
it 'calls the limited capacity worker' do
expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).to receive(:perform_with_capacity)
subject
end
end
shared_examples 'handling a taken exclusive lease' do
context 'with exclusive lease taken' do
before do
stub_exclusive_lease_taken(worker.lease_key, timeout: 5.hours)
@ -34,82 +32,6 @@ RSpec.describe ContainerExpirationPolicyWorker do
end
end
context 'with throttling enabled' do
before do
stub_feature_flags(container_registry_expiration_policies_throttling: true)
end
it 'calls the limited capacity worker' do
expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).to receive(:perform_with_capacity)
subject
end
it_behaves_like 'handling a taken exclusive lease'
end
context 'with throttling disabled' do
before do
stub_feature_flags(container_registry_expiration_policies_throttling: false)
end
context 'with no container expiration policies' do
it_behaves_like 'not executing any policy'
end
context 'with container expiration policies' do
let_it_be(:container_expiration_policy, reload: true) { create(:container_expiration_policy, :runnable) }
let_it_be(:container_repository) { create(:container_repository, project: container_expiration_policy.project) }
context 'a valid policy' do
it 'runs the policy' do
expect(ContainerExpirationPolicyService)
.to receive(:new).with(container_expiration_policy.project, nil).and_call_original
expect(CleanupContainerRepositoryWorker).to receive(:perform_async).once.and_call_original
expect { subject }.not_to raise_error
end
end
context 'a disabled policy' do
before do
container_expiration_policy.disable!
end
it_behaves_like 'not executing any policy'
end
context 'a policy that is not due for a run' do
before do
container_expiration_policy.update_column(:next_run_at, 2.minutes.from_now)
end
it_behaves_like 'not executing any policy'
end
context 'a policy linked to no container repository' do
before do
container_expiration_policy.container_repositories.delete_all
end
it_behaves_like 'not executing any policy'
end
context 'an invalid policy' do
before do
container_expiration_policy.update_column(:name_regex, '*production')
end
it 'disables the policy and tracks an error' do
expect(ContainerExpirationPolicyService).not_to receive(:new).with(container_expiration_policy, nil)
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(described_class::InvalidPolicyError), container_expiration_policy_id: container_expiration_policy.id)
expect { subject }.to change { container_expiration_policy.reload.enabled }.from(true).to(false)
end
end
end
end
context 'process stale ongoing cleanups' do
let_it_be(:stuck_cleanup) { create(:container_repository, :cleanup_ongoing, expiration_policy_started_at: 1.day.ago) }
let_it_be(:container_repository1) { create(:container_repository, :cleanup_scheduled) }