Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-11-20 21:18:24 +00:00
parent 40ea82e97a
commit d88fd39bb7
99 changed files with 1016 additions and 848 deletions

View File

@ -2699,7 +2699,7 @@
when: never
- if: '$DEPENDENCY_SCANNING_DISABLED || $GITLAB_FEATURES !~ /\bdependency_scanning\b/ || $DS_EXCLUDED_ANALYZERS =~ /gemnasium([^-]|$)/'
when: never
# Run Dependency Scanning on master until https://gitlab.com/gitlab-org/gitlab/-/issues/361657 is resolved
# Run Dependency Scanning on master until https://gitlab.com/gitlab-org/gitlab/-/issues/504908#note_2218591981 is resolved
- <<: *if-default-branch-refs
- <<: *if-default-refs
changes: *dependency-patterns
@ -2710,7 +2710,7 @@
when: never
- if: '$DEPENDENCY_SCANNING_DISABLED || $GITLAB_FEATURES !~ /\bdependency_scanning\b/ || $DS_EXCLUDED_ANALYZERS =~ /gemnasium-python/'
when: never
# Run Dependency Scanning on master until https://gitlab.com/gitlab-org/gitlab/-/issues/361657 is resolved
# Run Dependency Scanning on master until https://gitlab.com/gitlab-org/gitlab/-/issues/504908#note_2218591981 is resolved
- <<: *if-default-branch-refs
- <<: *if-default-refs
changes: *python-patterns

View File

@ -436,14 +436,6 @@ Gitlab/BoundedContexts:
- 'app/graphql/types/container_expiration_policy_keep_enum.rb'
- 'app/graphql/types/container_expiration_policy_older_than_enum.rb'
- 'app/graphql/types/container_expiration_policy_type.rb'
- 'app/graphql/types/container_repository_cleanup_status_enum.rb'
- 'app/graphql/types/container_repository_details_type.rb'
- 'app/graphql/types/container_repository_referrer_type.rb'
- 'app/graphql/types/container_repository_sort_enum.rb'
- 'app/graphql/types/container_repository_status_enum.rb'
- 'app/graphql/types/container_repository_tag_type.rb'
- 'app/graphql/types/container_repository_tags_sort_enum.rb'
- 'app/graphql/types/container_repository_type.rb'
- 'app/graphql/types/countable_connection_type.rb'
- 'app/graphql/types/current_user_todos.rb'
- 'app/graphql/types/current_user_type.rb'

View File

@ -2,7 +2,6 @@
# Cop supports --autocorrect.
Graphql/Descriptions:
Exclude:
- 'app/graphql/types/container_repository_type.rb'
- 'app/graphql/types/deployment_tag_type.rb'
- 'app/graphql/types/design_management/design_at_version_type.rb'
- 'app/graphql/types/design_management/design_fields.rb'

View File

@ -27,7 +27,6 @@ GraphQL/ExtractType:
- 'app/graphql/types/commit_type.rb'
- 'app/graphql/types/container_expiration_policy_type.rb'
- 'app/graphql/types/container_registry/protection/rule_type.rb'
- 'app/graphql/types/container_repository_type.rb'
- 'app/graphql/types/diff_type.rb'
- 'app/graphql/types/environment_type.rb'
- 'app/graphql/types/error_tracking/sentry_detailed_error_type.rb'

View File

@ -21,8 +21,6 @@ Graphql/ResourceNotAvailableError:
- 'app/graphql/resolvers/kas/agent_configurations_resolver.rb'
- 'app/graphql/resolvers/kas/agent_connections_resolver.rb'
- 'app/graphql/resolvers/projects/snippets_resolver.rb'
- 'app/graphql/types/container_repository_details_type.rb'
- 'app/graphql/types/container_repository_type.rb'
- 'ee/app/graphql/mutations/ai/action.rb'
- 'ee/app/graphql/mutations/audit_events/instance_external_audit_event_destinations/base.rb'
- 'ee/app/graphql/mutations/issues/set_escalation_policy.rb'

View File

@ -123,7 +123,6 @@ Layout/ArrayAlignment:
- 'spec/graphql/resolvers/project_issues_resolver_spec.rb'
- 'spec/graphql/types/blob_viewer_type_spec.rb'
- 'spec/graphql/types/boards/board_issue_input_type_spec.rb'
- 'spec/graphql/types/container_repository_details_type_spec.rb'
- 'spec/graphql/types/container_repository_type_spec.rb'
- 'spec/graphql/types/issuable_sort_enum_spec.rb'
- 'spec/graphql/types/issue_type_spec.rb'

View File

@ -63,9 +63,6 @@ Layout/LineLength:
- 'app/graphql/types/ci/runner_type.rb'
- 'app/graphql/types/ci/runner_web_url_edge.rb'
- 'app/graphql/types/container_expiration_policy_type.rb'
- 'app/graphql/types/container_repository_cleanup_status_enum.rb'
- 'app/graphql/types/container_repository_details_type.rb'
- 'app/graphql/types/container_repository_type.rb'
- 'app/graphql/types/dependency_proxy/group_setting_type.rb'
- 'app/graphql/types/dependency_proxy/image_ttl_group_policy_type.rb'
- 'app/graphql/types/environment_type.rb'

View File

@ -105,8 +105,6 @@ RSpec/ContainExactly:
- 'spec/graphql/types/ci/pipeline_schedule_sort_enum_spec.rb'
- 'spec/graphql/types/ci/pipeline_scope_enum_spec.rb'
- 'spec/graphql/types/ci/pipeline_status_enum_spec.rb'
- 'spec/graphql/types/container_repository_cleanup_status_enum_spec.rb'
- 'spec/graphql/types/container_repository_status_enum_spec.rb'
- 'spec/graphql/types/issuable_searchable_field_enum_spec.rb'
- 'spec/graphql/types/issuable_severity_enum_spec.rb'
- 'spec/graphql/types/merge_requests/mergeability_check_identifier_enum_spec.rb'

View File

@ -1616,10 +1616,6 @@ RSpec/FeatureCategory:
- 'spec/graphql/types/container_expiration_policy_keep_enum_spec.rb'
- 'spec/graphql/types/container_expiration_policy_older_than_enum_spec.rb'
- 'spec/graphql/types/container_expiration_policy_type_spec.rb'
- 'spec/graphql/types/container_repository_cleanup_status_enum_spec.rb'
- 'spec/graphql/types/container_repository_sort_enum_spec.rb'
- 'spec/graphql/types/container_repository_status_enum_spec.rb'
- 'spec/graphql/types/container_respository_tags_sort_enum_spec.rb'
- 'spec/graphql/types/countable_connection_type_spec.rb'
- 'spec/graphql/types/current_user_todos_type_spec.rb'
- 'spec/graphql/types/custom_emoji_type_spec.rb'

View File

@ -12,7 +12,6 @@ Style/HashEachMethods:
- 'app/graphql/types/ci/runner_access_level_enum.rb'
- 'app/graphql/types/ci/variable_type_enum.rb'
- 'app/graphql/types/clusters/agent_token_status_enum.rb'
- 'app/graphql/types/container_repository_status_enum.rb'
- 'app/graphql/types/data_visualization_palette/color_enum.rb'
- 'app/graphql/types/data_visualization_palette/weight_enum.rb'
- 'app/graphql/types/dependency_proxy/manifest_type_enum.rb'

View File

@ -84,7 +84,7 @@
{"name":"coderay","version":"1.1.3","platform":"ruby","checksum":"dc530018a4684512f8f38143cd2a096c9f02a1fc2459edcfe534787a7fc77d4b"},
{"name":"coercible","version":"1.0.0","platform":"ruby","checksum":"5081ad24352cc8435ce5472bc2faa30260c7ea7f2102cc6a9f167c4d9bffaadc"},
{"name":"colored2","version":"3.1.2","platform":"ruby","checksum":"b13c2bd7eeae2cf7356a62501d398e72fde78780bd26aec6a979578293c28b4a"},
{"name":"commonmarker","version":"0.23.10","platform":"ruby","checksum":"fdd312ae2bb4071b2f3085d4d7533cb9f8d9057a2eaa0760228a65bc3ed565d1"},
{"name":"commonmarker","version":"0.23.11","platform":"ruby","checksum":"9d1d35d358740151bce29235aebfecc63314fb57dd89a83e72d4061b4fe3d2bf"},
{"name":"concurrent-ruby","version":"1.2.3","platform":"ruby","checksum":"82fdd3f8a0816e28d513e637bb2b90a45d7b982bdf4f3a0511722d2e495801e2"},
{"name":"connection_pool","version":"2.4.1","platform":"ruby","checksum":"0f40cf997091f1f04ff66da67eabd61a9fe0d4928b9a3645228532512fab62f4"},
{"name":"console","version":"1.25.2","platform":"ruby","checksum":"460fbf8c1b0e527b2c275448b76f91c3e9fb72e6bead5d27fb5a638fc191e943"},

View File

@ -437,7 +437,7 @@ GEM
coercible (1.0.0)
descendants_tracker (~> 0.0.1)
colored2 (3.1.2)
commonmarker (0.23.10)
commonmarker (0.23.11)
concurrent-ruby (1.2.3)
connection_pool (2.4.1)
console (1.25.2)

View File

@ -84,7 +84,7 @@
{"name":"coderay","version":"1.1.3","platform":"ruby","checksum":"dc530018a4684512f8f38143cd2a096c9f02a1fc2459edcfe534787a7fc77d4b"},
{"name":"coercible","version":"1.0.0","platform":"ruby","checksum":"5081ad24352cc8435ce5472bc2faa30260c7ea7f2102cc6a9f167c4d9bffaadc"},
{"name":"colored2","version":"3.1.2","platform":"ruby","checksum":"b13c2bd7eeae2cf7356a62501d398e72fde78780bd26aec6a979578293c28b4a"},
{"name":"commonmarker","version":"0.23.10","platform":"ruby","checksum":"fdd312ae2bb4071b2f3085d4d7533cb9f8d9057a2eaa0760228a65bc3ed565d1"},
{"name":"commonmarker","version":"0.23.11","platform":"ruby","checksum":"9d1d35d358740151bce29235aebfecc63314fb57dd89a83e72d4061b4fe3d2bf"},
{"name":"concurrent-ruby","version":"1.2.3","platform":"ruby","checksum":"82fdd3f8a0816e28d513e637bb2b90a45d7b982bdf4f3a0511722d2e495801e2"},
{"name":"connection_pool","version":"2.4.1","platform":"ruby","checksum":"0f40cf997091f1f04ff66da67eabd61a9fe0d4928b9a3645228532512fab62f4"},
{"name":"console","version":"1.25.2","platform":"ruby","checksum":"460fbf8c1b0e527b2c275448b76f91c3e9fb72e6bead5d27fb5a638fc191e943"},
@ -410,7 +410,7 @@
{"name":"murmurhash3","version":"0.1.7","platform":"ruby","checksum":"370a2ce2e9ab0711e51554e530b5f63956927a6554a296855f42a1a4a5ed0936"},
{"name":"mustermann","version":"3.0.0","platform":"ruby","checksum":"6d3569aa3c3b2f048c60626f48d9b2d561cc8d2ef269296943b03da181c08b67"},
{"name":"mustermann-grape","version":"1.0.2","platform":"ruby","checksum":"6f5309d6a338f801f211c644e8c2d3cc2577a8693f9cd51dadfdb29c1260f5fe"},
{"name":"mutex_m","version":"0.2.0","platform":"ruby","checksum":"b6ef0c6c842ede846f2ec0ade9e266b1a9dac0bc151682b04835e8ebd54840d5"},
{"name":"mutex_m","version":"0.3.0","platform":"ruby","checksum":"cfcb04ac16b69c4813777022fdceda24e9f798e48092a2b817eb4c0a782b0751"},
{"name":"nap","version":"1.1.0","platform":"ruby","checksum":"949691660f9d041d75be611bb2a8d2fd559c467537deac241f4097d9b5eea576"},
{"name":"neighbor","version":"0.3.2","platform":"ruby","checksum":"b795bbcc24b1b9ae82d9f7e97a3461b0b3607d24a85a7acbed776bd498e7eba8"},
{"name":"nenv","version":"0.3.0","platform":"ruby","checksum":"d9de6d8fb7072228463bf61843159419c969edb34b3cef51832b516ae7972765"},

View File

@ -446,7 +446,7 @@ GEM
coercible (1.0.0)
descendants_tracker (~> 0.0.1)
colored2 (3.1.2)
commonmarker (0.23.10)
commonmarker (0.23.11)
concurrent-ruby (1.2.3)
connection_pool (2.4.1)
console (1.25.2)
@ -1177,7 +1177,7 @@ GEM
ruby2_keywords (~> 0.0.1)
mustermann-grape (1.0.2)
mustermann (>= 1.0.0)
mutex_m (0.2.0)
mutex_m (0.3.0)
nap (1.1.0)
neighbor (0.3.2)
activerecord (>= 6.1)

View File

@ -1,6 +1,6 @@
<script>
import { GlLink } from '@gitlab/ui';
import { INSTRUMENT_TODO_ITEM_FOLLOW, TODO_STATE_DONE, TODO_STATE_PENDING } from '../constants';
import { INSTRUMENT_TODO_ITEM_FOLLOW, TODO_STATE_DONE } from '../constants';
import TodoItemTitle from './todo_item_title.vue';
import TodoItemBody from './todo_item_body.vue';
import TodoItemTimestamp from './todo_item_timestamp.vue';
@ -15,6 +15,7 @@ export default {
TodoItemTimestamp,
TodoItemActions,
},
inject: ['currentTab'],
props: {
currentUserId: {
type: String,
@ -29,9 +30,6 @@ export default {
isDone() {
return this.todo.state === TODO_STATE_DONE;
},
isPending() {
return this.todo.state === TODO_STATE_PENDING;
},
targetUrl() {
return this.todo.targetUrl;
},
@ -46,6 +44,7 @@ export default {
<li
class="gl-border-t gl-border-b gl-relative -gl-mt-px gl-block gl-px-5 gl-py-3 hover:gl-z-1 hover:gl-cursor-pointer hover:gl-border-blue-200 hover:gl-bg-blue-50"
:data-testid="`todo-item-${todo.id}`"
:class="{ 'gl-bg-subtle': isDone }"
>
<gl-link
:href="targetUrl"

View File

@ -3,12 +3,7 @@ import { GlButton, GlTooltipDirective } from '@gitlab/ui';
import { reportToSentry } from '~/ci/utils';
import { s__ } from '~/locale';
import Tracking from '~/tracking';
import {
INSTRUMENT_TODO_ITEM_CLICK,
TAB_ALL,
TODO_STATE_DONE,
TODO_STATE_PENDING,
} from '../constants';
import { INSTRUMENT_TODO_ITEM_CLICK, TODO_STATE_DONE, TODO_STATE_PENDING } from '../constants';
import markAsDoneMutation from './mutations/mark_as_done.mutation.graphql';
import markAsPendingMutation from './mutations/mark_as_pending.mutation.graphql';
@ -20,18 +15,12 @@ export default {
GlTooltip: GlTooltipDirective,
},
mixins: [Tracking.mixin()],
inject: ['currentTab'],
props: {
todo: {
type: Object,
required: true,
},
},
data() {
return {
isLoading: false,
};
},
computed: {
isDone() {
return this.todo.state === TODO_STATE_DONE;
@ -40,11 +29,6 @@ export default {
return this.todo.state === TODO_STATE_PENDING;
},
tooltipTitle() {
// Setting this to null while loading, combined with keeping the
// loading state till the item gets removed, prevents the tooltip
// text changing with the item state before the item gets removed.
if (this.isLoading) return null;
return this.isDone ? this.$options.i18n.markAsPending : this.$options.i18n.markAsDone;
},
},
@ -77,13 +61,21 @@ export default {
const showError = this.isDone ? this.showMarkAsPendingError : this.showMarkAsDoneError;
try {
this.isLoading = true;
const { data } = await this.$apollo.mutate({
mutation,
variables: {
todoId: this.todo.id,
},
optimisticResponse: {
toggleStatus: {
todo: {
id: this.todo.id,
state: this.isDone ? TODO_STATE_PENDING : TODO_STATE_DONE,
__typename: 'Todo',
},
errors: [],
},
},
});
if (data.errors?.length > 0) {
@ -95,16 +87,6 @@ export default {
} catch (failure) {
reportToSentry(this.$options.name, failure);
showError();
this.isLoading = false;
} finally {
// Only stop loading spinner when on "All" tab.
// On the other tabs (Pending/Done) we want the loading to continue
// until the todos query finished, removing this item from the list.
// This way we hide the state change, which would otherwise update
// the button's icon before it gets removed.
if (this.currentTab === TAB_ALL) {
this.isLoading = false;
}
}
},
},
@ -119,7 +101,6 @@ export default {
<gl-button
v-gl-tooltip.hover
:icon="isDone ? 'redo' : 'check'"
:loading="isLoading"
:aria-label="isDone ? $options.i18n.markAsPending : $options.i18n.markAsDone"
:title="tooltipTitle"
@click.prevent="toggleStatus"

View File

@ -17,13 +17,12 @@ import Tracking from '~/tracking';
import {
INSTRUMENT_TAB_LABELS,
INSTRUMENT_TODO_FILTER_CHANGE,
INSTRUMENT_TODO_ITEM_CLICK,
STATUS_BY_TAB,
TAB_PENDING,
TODO_WAIT_BEFORE_RELOAD,
} from '~/todos/constants';
import getTodosQuery from './queries/get_todos.query.graphql';
import getPendingTodosCount from './queries/get_pending_todos_count.query.graphql';
import markAsDoneMutation from './mutations/mark_as_done.mutation.graphql';
import markAsPendingMutation from './mutations/mark_as_pending.mutation.graphql';
import TodoItem from './todo_item.vue';
import TodosEmptyState from './todos_empty_state.vue';
import TodosFilterBar, { SORT_OPTIONS } from './todos_filter_bar.vue';
@ -56,6 +55,8 @@ export default {
},
data() {
return {
updatePid: null,
needsRefresh: false,
cursor: {
first: ENTRIES_PER_PAGE,
after: null,
@ -99,6 +100,10 @@ export default {
this.alert = createAlert({ message: s__('Todos|Something went wrong. Please try again.') });
Sentry.captureException(error);
},
watchLoading() {
// We reset the `needsRefresh` when paginating or changing tabs
this.needsRefresh = false;
},
},
pendingTodosCount: {
query: getPendingTodosCount,
@ -129,7 +134,7 @@ export default {
return !this.isLoading && this.todos.length === 0;
},
showMarkAllAsDone() {
return this.currentTab === 0 && !this.showEmptyState;
return this.currentTab === TAB_PENDING && !this.showEmptyState;
},
},
mounted() {
@ -176,27 +181,10 @@ export default {
this.updateAllQueries(false);
}
},
async handleItemChanged(id, markedAsDone) {
await this.updateAllQueries(false);
this.showUndoToast(id, markedAsDone);
},
showUndoToast(todoId, markedAsDone) {
const message = markedAsDone ? s__('Todos|Marked as done') : s__('Todos|Marked as undone');
const mutation = markedAsDone ? markAsPendingMutation : markAsDoneMutation;
async handleItemChanged() {
this.needsRefresh = true;
const { hide } = this.$toast.show(message, {
action: {
text: s__('Todos|Undo'),
onClick: async () => {
hide();
await this.$apollo.mutate({ mutation, variables: { todoId } });
this.track(INSTRUMENT_TODO_ITEM_CLICK, {
label: markedAsDone ? 'undo_mark_done' : 'undo_mark_pending',
});
this.updateAllQueries(false);
},
},
});
await this.updateCounts();
},
updateCounts() {
return this.$apollo.queries.pendingTodosCount.refetch();
@ -209,6 +197,29 @@ export default {
this.showSpinnerWhileLoading = true;
},
markInteracting() {
clearTimeout(this.updatePid);
},
stoppedInteracting() {
if (!this.needsRefresh) {
return;
}
if (this.updatePid) {
clearTimeout(this.updatePid);
}
this.updatePid = setTimeout(() => {
/*
We double-check needsRefresh or
whether a query is already running
*/
if (this.needsRefresh && !this.$apollo.queries.todos.loading) {
this.updateAllQueries(false);
}
this.updatePid = null;
}, TODO_WAIT_BEFORE_RELOAD);
},
},
};
</script>
@ -262,7 +273,13 @@ export default {
<div>
<div class="gl-flex gl-flex-col">
<gl-loading-icon v-if="isLoading && showSpinnerWhileLoading" size="lg" class="gl-mt-5" />
<ul v-else class="gl-m-0 gl-border-collapse gl-list-none gl-p-0">
<ul
v-else
data-testid="todo-item-list-container"
class="gl-m-0 gl-border-collapse gl-list-none gl-p-0"
@mouseenter="markInteracting"
@mouseleave="stoppedInteracting"
>
<transition-group name="todos">
<todo-item
v-for="todo in todos"

View File

@ -49,3 +49,5 @@ export const INSTRUMENT_TODO_SORT_CHANGE = 'sort_todo_list';
export const INSTRUMENT_TODO_FILTER_CHANGE = 'filter_todo_list';
export const INSTRUMENT_TAB_LABELS = ['status_pending', 'status_done', 'status_all'];
export const TODO_WAIT_BEFORE_RELOAD = 1 * 1000; // 1 second

View File

@ -56,6 +56,7 @@ export default {
},
data() {
return {
number: undefined,
hasAppeared: false,
};
},

View File

@ -13,7 +13,7 @@ module Mutations
description: 'ID of the container repository.'
field :container_repository,
Types::ContainerRepositoryType,
Types::ContainerRegistry::ContainerRepositoryType,
null: false,
description: 'Container repository policy after scheduling the deletion.'

View File

@ -4,13 +4,13 @@ module Resolvers
class ContainerRepositoriesResolver < BaseResolver
include ::Mutations::PackageEventable
type Types::ContainerRepositoryType, null: true
type Types::ContainerRegistry::ContainerRepositoryType, null: true
argument :name, GraphQL::Types::String,
required: false,
description: 'Filter the container repositories by their name.'
argument :sort, Types::ContainerRepositorySortEnum,
argument :sort, Types::ContainerRegistry::ContainerRepositorySortEnum,
description: 'Sort container repositories by the criteria.',
required: false,
default_value: :created_desc

View File

@ -2,9 +2,9 @@
module Resolvers
class ContainerRepositoryTagsResolver < BaseResolver
type Types::ContainerRepositoryTagType.connection_type, null: true
type Types::ContainerRegistry::ContainerRepositoryTagType.connection_type, null: true
argument :sort, Types::ContainerRepositoryTagsSortEnum,
argument :sort, Types::ContainerRegistry::ContainerRepositoryTagsSortEnum,
description: 'Sort tags by these criteria.',
required: false,
default_value: nil
@ -82,7 +82,7 @@ module Resolvers
end
def sort_tags(to_be_sorted, sort)
raise StandardError unless Types::ContainerRepositoryTagsSortEnum.enum.include?(sort)
raise StandardError unless Types::ContainerRegistry::ContainerRepositoryTagsSortEnum.enum.include?(sort)
sort_value, _, direction = sort.to_s.rpartition('_')

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
module Types
module ContainerRegistry
# GraphQL enum describing the state of the tag-cleanup process for a
# container repository. Each GraphQL value maps to an underlying
# 'cleanup_*' string value (presumably the model's stored status —
# TODO(review): confirm against ContainerRepository).
class ContainerRepositoryCleanupStatusEnum < BaseEnum
graphql_name 'ContainerRepositoryCleanupStatus'
description 'Status of the tags cleanup of a container repository'
value 'UNSCHEDULED', value: 'cleanup_unscheduled',
description: 'Tags cleanup is not scheduled. This is the default state.'
value 'SCHEDULED', value: 'cleanup_scheduled',
description: 'Tags cleanup is scheduled and is going to be executed shortly.'
value 'UNFINISHED', value: 'cleanup_unfinished',
description: 'Tags cleanup has been partially executed. There are still remaining tags to delete.'
value 'ONGOING', value: 'cleanup_ongoing', description: 'Tags cleanup is ongoing.'
end
end
end

View File

@ -0,0 +1,69 @@
# frozen_string_literal: true
module Types
module ContainerRegistry
# Detailed GraphQL type for a single container repository. Extends the
# base ContainerRepositoryType with tag listing, manifest lookup, size,
# and last-published information. Authorization is inherited from the
# parent type (see the rubocop disable note below).
class ContainerRepositoryDetailsType < Types::ContainerRegistry::ContainerRepositoryType # rubocop:disable Graphql/AuthorizeTypes -- authorization is inherited from the parent: ContainerRepositoryType
graphql_name 'ContainerRepositoryDetails'
include Gitlab::Graphql::Authorize::AuthorizeResource
description 'Details of a container repository'
field :tags,
Types::ContainerRegistry::ContainerRepositoryTagType.connection_type,
null: true,
description: 'Tags of the container repository.',
max_page_size: 20,
resolver: Resolvers::ContainerRepositoryTagsResolver,
connection_extension: Gitlab::Graphql::Extensions::ExternallyPaginatedArrayExtension
field :manifest, GraphQL::Types::String,
null: true,
description: 'An image manifest from the container repository.' do
argument :reference, GraphQL::Types::String,
required: true,
description: 'Tag name or digest of the manifest.'
end
field :size,
GraphQL::Types::Float,
null: true,
description:
'Deduplicated size of the image repository in bytes. ' \
'This is only available on GitLab.com for repositories created after `2021-11-04`.'
field :last_published_at,
Types::TimeType,
null: true,
description:
'Timestamp when a repository tag was last created or updated. ' \
'Only present for repositories that had tags created or updated after GitLab 16.11.'
# Resolver for the :size field; delegates to the object and converts
# registry connection failures into a GraphQL "resource not available"
# error via handling_errors.
def size
handling_errors { object.size }
end
# Resolver for the :last_published_at field, with the same error handling.
def last_published_at
handling_errors { object.last_published_at }
end
# Resolver for the :manifest field. Fetches the manifest for the given
# tag name or digest and serializes it to JSON; returns nil when the
# manifest is not found (nil from image_manifest).
def manifest(reference:)
handling_errors do
manifest = object.image_manifest(reference)
manifest.as_json if manifest
end
end
private
# Wraps registry calls: any Faraday (HTTP client) error is re-raised as
# a resource-not-available GraphQL error with a user-facing message,
# rather than leaking a 500 to the client.
def handling_errors
yield
rescue Faraday::Error
raise_resource_not_available_error!(
"Can't connect to the Container Registry. " \
'If this error persists, please review the troubleshooting documentation.'
)
end
end
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
module Types
module ContainerRegistry
# GraphQL type for an OCI referrer attached to a container repository
# tag (e.g. signatures/attestations — NOTE(review): exact referrer kinds
# are not visible here). Exposes only the artifact type and digest.
class ContainerRepositoryReferrerType < BaseObject
graphql_name 'ContainerRepositoryReferrer'
description 'A referrer for a container repository tag'
# Requires the same permission as reading the image itself.
authorize :read_container_image
expose_permissions Types::PermissionTypes::ContainerRepositoryTag
field :artifact_type, GraphQL::Types::String, description: 'Artifact type of the referrer.'
field :digest, GraphQL::Types::String, description: 'Digest of the referrer.'
end
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
module Types
module ContainerRegistry
# Sort options for container repository lists. Inherits the common
# sort values from SortEnum and adds name-based ordering.
class ContainerRepositorySortEnum < SortEnum
graphql_name 'ContainerRepositorySort'
description 'Values for sorting container repositories'
value 'NAME_ASC', 'Name by ascending order.', value: :name_asc
value 'NAME_DESC', 'Name by descending order.', value: :name_desc
end
end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
module Types
module ContainerRegistry
# GraphQL enum mirroring ContainerRepository.statuses: one value per
# model status, generated dynamically so the enum never drifts from the
# model definition.
class ContainerRepositoryStatusEnum < BaseEnum
graphql_name 'ContainerRepositoryStatus'
description 'Status of a container repository'
# statuses is presumably an ActiveRecord enum mapping (name => value);
# each key becomes an UPPERCASE GraphQL value with a titleized description.
::ContainerRepository.statuses.each_key do |status|
value status.upcase, value: status, description: "#{status.titleize} status."
end
end
end
end

View File

@ -0,0 +1,28 @@
# frozen_string_literal: true
module Types
module ContainerRegistry
# GraphQL type for a single tag within a container repository. All
# fields are plain delegations to the underlying tag object; no custom
# resolvers are defined here.
class ContainerRepositoryTagType < BaseObject
graphql_name 'ContainerRepositoryTag'
description 'A tag from a container repository'
# Reading a tag requires permission to read the image it belongs to.
authorize :read_container_image
expose_permissions Types::PermissionTypes::ContainerRepositoryTag
field :created_at, Types::TimeType, null: true, description: 'Timestamp when the tag was created.'
field :digest, GraphQL::Types::String, null: true, description: 'Digest of the tag.'
field :location, GraphQL::Types::String, null: false, description: 'URL of the tag.'
field :media_type, GraphQL::Types::String, null: true, description: 'Media type of the tag.'
field :name, GraphQL::Types::String, null: false, description: 'Name of the tag.'
field :path, GraphQL::Types::String, null: false, description: 'Path of the tag.'
field :published_at, Types::TimeType, null: true, description: 'Timestamp when the tag was published.'
field :referrers, [Types::ContainerRegistry::ContainerRepositoryReferrerType], null: true,
description: 'Referrers for the tag.'
field :revision, GraphQL::Types::String, null: true, description: 'Revision of the tag.'
field :short_revision, GraphQL::Types::String, null: true, description: 'Short revision of the tag.'
field :total_size, GraphQL::Types::BigInt, null: true, description: 'Size of the tag.'
end
end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
module Types
module ContainerRegistry
# Sort options for container repository tags. published_at ordering is
# documented as GitLab.com-only.
class ContainerRepositoryTagsSortEnum < BaseEnum
graphql_name 'ContainerRepositoryTagSort'
description 'Values for sorting tags'
value 'NAME_ASC', 'Ordered by name in ascending order.', value: :name_asc
value 'NAME_DESC', 'Ordered by name in descending order.', value: :name_desc
value 'PUBLISHED_AT_ASC',
'Ordered by published_at in ascending order. Only available for GitLab.com.', value: :published_at_asc
value 'PUBLISHED_AT_DESC',
'Ordered by published_at in descending order. Only available for GitLab.com.', value: :published_at_desc
end
end
end

View File

@ -0,0 +1,82 @@
# frozen_string_literal: true
module Types
module ContainerRegistry
# Base GraphQL type for a container repository: metadata, cleanup and
# migration state, tag counts, and protection-rule information.
# Extended by ContainerRepositoryDetailsType for per-repository detail
# queries.
class ContainerRepositoryType < BaseObject
graphql_name 'ContainerRepository'
include Gitlab::Graphql::Authorize::AuthorizeResource
description 'A container repository'
authorize :read_container_image
expose_permissions Types::PermissionTypes::ContainerRepository
field :created_at, Types::TimeType, null: false,
description: 'Timestamp when the container repository was created.'
field :expiration_policy_cleanup_status, Types::ContainerRegistry::ContainerRepositoryCleanupStatusEnum,
null: true,
description: 'Tags cleanup status for the container repository.'
field :expiration_policy_started_at, Types::TimeType, null: true, # rubocop:disable GraphQL/ExtractType -- maintain current type
description: 'Timestamp when the cleanup done by the expiration policy was started on the container repository.'
field :id, GraphQL::Types::ID, null: false, description: 'ID of the container repository.'
field :last_cleanup_deleted_tags_count, GraphQL::Types::Int, null: true,
description: 'Number of deleted tags from the last cleanup.'
field :location, GraphQL::Types::String, null: false, description: 'URL of the container repository.'
field :migration_state, GraphQL::Types::String,
null: false,
description: 'Migration state of the container repository.',
deprecated: {
reason:
'Returns an empty string. This was used for the migration of GitLab.com, which is now complete. ' \
'Not used by Self-managed instances',
milestone: '17.0'
}
field :name, GraphQL::Types::String, null: false, description: 'Name of the container repository.'
field :path, GraphQL::Types::String, null: false, description: 'Path of the container repository.'
field :project, Types::ProjectType, null: false, description: 'Project of the container registry.'
field :protection_rule_exists, GraphQL::Types::Boolean,
null: false,
experiment: { milestone: '17.2' },
description:
'Whether any matching container protection rule exists for the container. ' \
'Available only when feature flag `container_registry_protected_containers` is enabled.'
field :status, Types::ContainerRegistry::ContainerRepositoryStatusEnum, null: true,
description: 'Status of the container repository.'
field :tags_count, GraphQL::Types::Int, null: false, description: 'Number of tags associated with the image.'
field :updated_at, Types::TimeType, null: false,
description: 'Timestamp when the container repository was updated.'
# Resolves :project through BatchModelLoader so that project lookups
# across many repositories in one query are batched (avoids N+1).
def project
Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, object.project_id).find
end
# Resolves :tags_count; registry connection failures (Faraday errors)
# surface as a GraphQL resource-not-available error instead of a 500.
def tags_count
object.tags_count
rescue Faraday::Error
raise_resource_not_available_error!(
'We are having trouble connecting to the Container Registry. ' \
'If this error persists, please review the troubleshooting documentation.'
)
end
# The migration has now completed and we are cleaning up the migration db columns.
# For backward compatibility, we are keeping this field accessible.
# This field will be removed in 18.0.
def migration_state
''
end
# Resolves :protection_rule_exists. Short-circuits to false when the
# feature flag is disabled for the project's root namespace; otherwise
# batch-loads the protected? answer per repository path so one DB query
# serves all repositories in the GraphQL response.
def protection_rule_exists
return false if Feature.disabled?(:container_registry_protected_containers, object.project.root_ancestor)
BatchLoader::GraphQL.for(object.path).batch do |repository_paths, loader|
::ContainerRegistry::Protection::Rule
.for_push_exists_for_multiple_containers(repository_paths: repository_paths, project_id: object.project_id)
.each { |row| loader.call(row['repository_path'], row['protected']) }
end
end
end
end
end

View File

@ -1,13 +0,0 @@
# frozen_string_literal: true
module Types
class ContainerRepositoryCleanupStatusEnum < BaseEnum
graphql_name 'ContainerRepositoryCleanupStatus'
description 'Status of the tags cleanup of a container repository'
value 'UNSCHEDULED', value: 'cleanup_unscheduled', description: 'Tags cleanup is not scheduled. This is the default state.'
value 'SCHEDULED', value: 'cleanup_scheduled', description: 'Tags cleanup is scheduled and is going to be executed shortly.'
value 'UNFINISHED', value: 'cleanup_unfinished', description: 'Tags cleanup has been partially executed. There are still remaining tags to delete.'
value 'ONGOING', value: 'cleanup_ongoing', description: 'Tags cleanup is ongoing.'
end
end

View File

@ -1,60 +0,0 @@
# frozen_string_literal: true
module Types
class ContainerRepositoryDetailsType < Types::ContainerRepositoryType
graphql_name 'ContainerRepositoryDetails'
description 'Details of a container repository'
authorize :read_container_image
field :tags,
Types::ContainerRepositoryTagType.connection_type,
null: true,
description: 'Tags of the container repository.',
max_page_size: 20,
resolver: Resolvers::ContainerRepositoryTagsResolver,
connection_extension: Gitlab::Graphql::Extensions::ExternallyPaginatedArrayExtension
field :manifest, GraphQL::Types::String,
null: true,
description: 'An image manifest from the container repository.' do
argument :reference, GraphQL::Types::String,
required: true,
description: 'Tag name or digest of the manifest.'
end
field :size,
GraphQL::Types::Float,
null: true,
description: 'Deduplicated size of the image repository in bytes. This is only available on GitLab.com for repositories created after `2021-11-04`.'
field :last_published_at,
Types::TimeType,
null: true,
description: 'Timestamp when a repository tag was last created or updated. Only present for repositories that had tags created or updated after GitLab 16.11.'
def size
handling_errors { object.size }
end
def last_published_at
handling_errors { object.last_published_at }
end
def manifest(reference:)
handling_errors do
manifest = object.image_manifest(reference)
manifest.as_json if manifest
end
end
private
def handling_errors
yield
rescue Faraday::Error
raise ::Gitlab::Graphql::Errors::ResourceNotAvailable, "Can't connect to the Container Registry. If this error persists, please review the troubleshooting documentation."
end
end
end

View File

@ -1,16 +0,0 @@
# frozen_string_literal: true
module Types
class ContainerRepositoryReferrerType < BaseObject
graphql_name 'ContainerRepositoryReferrer'
description 'A referrer for a container repository tag'
authorize :read_container_image
expose_permissions Types::PermissionTypes::ContainerRepositoryTag
field :artifact_type, GraphQL::Types::String, description: 'Artifact type of the referrer.'
field :digest, GraphQL::Types::String, description: 'Digest of the referrer.'
end
end

View File

@ -1,11 +0,0 @@
# frozen_string_literal: true
module Types
class ContainerRepositorySortEnum < SortEnum
graphql_name 'ContainerRepositorySort'
description 'Values for sorting container repositories'
value 'NAME_ASC', 'Name by ascending order.', value: :name_asc
value 'NAME_DESC', 'Name by descending order.', value: :name_desc
end
end

View File

@ -1,12 +0,0 @@
# frozen_string_literal: true
module Types
class ContainerRepositoryStatusEnum < BaseEnum
graphql_name 'ContainerRepositoryStatus'
description 'Status of a container repository'
::ContainerRepository.statuses.keys.each do |status|
value status.upcase, value: status, description: "#{status.titleize} status."
end
end
end

View File

@ -1,25 +0,0 @@
# frozen_string_literal: true

module Types
  # GraphQL object type describing a single tag of a container repository,
  # including registry metadata (digest, media type, size) and timestamps.
  class ContainerRepositoryTagType < BaseObject
    graphql_name 'ContainerRepositoryTag'
    description 'A tag from a container repository'

    authorize :read_container_image
    expose_permissions Types::PermissionTypes::ContainerRepositoryTag

    field :created_at, Types::TimeType, null: true, description: 'Timestamp when the tag was created.'
    field :digest, GraphQL::Types::String, null: true, description: 'Digest of the tag.'
    field :location, GraphQL::Types::String, null: false, description: 'URL of the tag.'
    field :media_type, GraphQL::Types::String, null: true, description: 'Media type of the tag.'
    field :name, GraphQL::Types::String, null: false, description: 'Name of the tag.'
    field :path, GraphQL::Types::String, null: false, description: 'Path of the tag.'
    field :published_at, Types::TimeType, null: true, description: 'Timestamp when the tag was published.'
    field :referrers, [Types::ContainerRepositoryReferrerType], null: true, description: 'Referrers for the tag.'
    field :revision, GraphQL::Types::String, null: true, description: 'Revision of the tag.'
    field :short_revision, GraphQL::Types::String, null: true, description: 'Short revision of the tag.'
    # BigInt because image sizes can exceed the 32-bit GraphQL Int range.
    field :total_size, GraphQL::Types::BigInt, null: true, description: 'Size of the tag.'
  end
end

View File

@ -1,15 +0,0 @@
# frozen_string_literal: true

module Types
  # GraphQL sort enum for container repository tags. Extends BaseEnum
  # directly (not SortEnum) because tags support only these orderings.
  class ContainerRepositoryTagsSortEnum < BaseEnum
    graphql_name 'ContainerRepositoryTagSort'
    description 'Values for sorting tags'

    value 'NAME_ASC', 'Ordered by name in ascending order.', value: :name_asc
    value 'NAME_DESC', 'Ordered by name in descending order.', value: :name_desc
    # published_at ordering relies on registry metadata that is only
    # available on GitLab.com, per the descriptions below.
    value 'PUBLISHED_AT_ASC',
      'Ordered by published_at in ascending order. Only available for GitLab.com.', value: :published_at_asc
    value 'PUBLISHED_AT_DESC',
      'Ordered by published_at in descending order. Only available for GitLab.com.', value: :published_at_desc
  end
end

View File

@ -1,66 +0,0 @@
# frozen_string_literal: true

module Types
  # GraphQL object type for a container repository, exposing registry
  # metadata, cleanup-policy state, and protection-rule information.
  class ContainerRepositoryType < BaseObject
    graphql_name 'ContainerRepository'
    description 'A container repository'

    authorize :read_container_image
    expose_permissions Types::PermissionTypes::ContainerRepository

    field :created_at, Types::TimeType, null: false, description: 'Timestamp when the container repository was created.'
    field :expiration_policy_cleanup_status, Types::ContainerRepositoryCleanupStatusEnum, null: true, description: 'Tags cleanup status for the container repository.'
    field :expiration_policy_started_at, Types::TimeType, null: true, description: 'Timestamp when the cleanup done by the expiration policy was started on the container repository.'
    field :id, GraphQL::Types::ID, null: false, description: 'ID of the container repository.'
    field :last_cleanup_deleted_tags_count, GraphQL::Types::Int, null: true, description: 'Number of deleted tags from the last cleanup.'
    field :location, GraphQL::Types::String, null: false, description: 'URL of the container repository.'
    # Deprecated: kept only for API backward compatibility; see the
    # migration_state resolver below.
    field :migration_state, GraphQL::Types::String,
      null: false,
      description: 'Migration state of the container repository.',
      deprecated: {
        reason: 'Returns an empty string. This was used for the migration of GitLab.com, which is now complete. Not used by Self-managed instances',
        milestone: '17.0'
      }
    field :name, GraphQL::Types::String, null: false, description: 'Name of the container repository.'
    field :path, GraphQL::Types::String, null: false, description: 'Path of the container repository.'
    field :project, Types::ProjectType, null: false, description: 'Project of the container registry.'
    field :protection_rule_exists, GraphQL::Types::Boolean,
      null: false,
      experiment: { milestone: '17.2' },
      description:
        'Whether any matching container protection rule exists for this container. ' \
        'Available only when feature flag `container_registry_protected_containers` is enabled.'
    field :status, Types::ContainerRepositoryStatusEnum, null: true, description: 'Status of the container repository.'
    field :tags_count, GraphQL::Types::Int, null: false, description: 'Number of tags associated with this image.'
    field :updated_at, Types::TimeType, null: false, description: 'Timestamp when the container repository was updated.'

    # Batched project lookup to avoid N+1 queries when many repositories
    # are resolved in one GraphQL request.
    def project
      Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, object.project_id).find
    end

    # Tag count comes from the registry over HTTP; connection failures
    # surface as a GraphQL ResourceNotAvailable error.
    def tags_count
      object.tags_count
    rescue Faraday::Error
      raise ::Gitlab::Graphql::Errors::ResourceNotAvailable, 'We are having trouble connecting to the Container Registry. If this error persists, please review the troubleshooting documentation.'
    end

    # The migration has now completed and we are cleaning up the migration db columns.
    # For backward compatibility, we are keeping this field accessible.
    # This field will be removed in 18.0.
    def migration_state
      ''
    end

    # Batches the protection-rule existence check across all repository
    # paths in the request; one query answers every repository at once.
    def protection_rule_exists
      return false if Feature.disabled?(:container_registry_protected_containers, object.project.root_ancestor)

      BatchLoader::GraphQL.for(object.path).batch do |repository_paths, loader|
        ::ContainerRegistry::Protection::Rule
          .for_push_exists_for_multiple_containers(repository_paths: repository_paths, project_id: object.project_id)
          .each { |row| loader.call(row['repository_path'], row['protected']) }
      end
    end
  end
end

View File

@ -151,7 +151,7 @@ module Types
resolver: Resolvers::GroupMembersResolver
field :container_repositories,
Types::ContainerRepositoryType.connection_type,
Types::ContainerRegistry::ContainerRepositoryType.connection_type,
null: true,
description: 'Container repositories of the group.',
resolver: Resolvers::ContainerRepositoriesResolver,

View File

@ -529,7 +529,7 @@ module Types
experiment: { milestone: '16.10' },
resolver: Resolvers::ProjectContainerRegistryProtectionRulesResolver
field :container_repositories, Types::ContainerRepositoryType.connection_type,
field :container_repositories, Types::ContainerRegistry::ContainerRepositoryType.connection_type,
null: true,
description: 'Container repositories of the project.',
resolver: Resolvers::ContainerRepositoriesResolver

View File

@ -40,7 +40,7 @@ module Types
null: true,
description: "List of the instance's CI/CD variables.",
resolver: Resolvers::Ci::VariablesResolver
field :container_repository, Types::ContainerRepositoryDetailsType,
field :container_repository, Types::ContainerRegistry::ContainerRepositoryDetailsType,
null: true,
description: 'Find a container repository.' do
argument :id,

View File

@ -7,6 +7,7 @@ module PartitionedTable
attr_reader :partitioning_strategy
PARTITIONING_STRATEGIES = {
daily: Gitlab::Database::Partitioning::Time::DailyStrategy,
monthly: Gitlab::Database::Partitioning::MonthlyStrategy,
sliding_list: Gitlab::Database::Partitioning::SlidingListStrategy,
ci_sliding_list: Gitlab::Database::Partitioning::CiSlidingListStrategy,

View File

@ -1414,11 +1414,7 @@ class MergeRequest < ApplicationRecord
end
def default_auto_merge_strategy
if Feature.enabled?(:merge_when_checks_pass, project)
AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS
else
AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS
end
AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS
end
def auto_merge_strategy=(strategy)

View File

@ -44,7 +44,6 @@ module AutoMerge
override :available_for
def available_for?(merge_request)
super do
next false if Feature.disabled?(:merge_when_checks_pass, merge_request.project)
next false if merge_request.project.merge_trains_enabled?
next false if merge_request.mergeable? && !merge_request.diff_head_pipeline_considered_in_progress?

View File

@ -32,12 +32,8 @@ module AutoMerge
end
end
def available_for?(merge_request)
super do
next false if Feature.enabled?(:merge_when_checks_pass, merge_request.project)
merge_request.diff_head_pipeline_considered_in_progress?
end
def available_for?(_merge_request)
false
end
private

View File

@ -83,15 +83,11 @@ module Discussions
def process_auto_merge
return unless discussions_ready_to_merge?
if Feature.enabled?(:merge_when_checks_pass, merge_request.project)
Gitlab::EventStore.publish(
MergeRequests::DiscussionsResolvedEvent.new(
data: { current_user_id: current_user.id, merge_request_id: merge_request.id }
)
Gitlab::EventStore.publish(
MergeRequests::DiscussionsResolvedEvent.new(
data: { current_user_id: current_user.id, merge_request_id: merge_request.id }
)
else
AutoMergeProcessWorker.perform_async({ 'merge_request_id' => merge_request.id })
end
)
end
def discussions_ready_to_merge?

View File

@ -114,7 +114,6 @@ module MergeRequests
end
def cancel_auto_merges_targeting_source_branch(merge_request)
return unless Feature.enabled?(:merge_when_checks_pass, merge_request.project)
return unless params[:delete_source_branch]
merge_request.source_project

View File

@ -238,7 +238,7 @@ module MergeRequests
# email template itself, see `change_in_merge_request_draft_status_email` template.
notify_draft_status_changed(merge_request)
trigger_merge_request_status_updated(merge_request)
publish_draft_change_event(merge_request) if Feature.enabled?(:merge_when_checks_pass, project)
publish_draft_change_event(merge_request)
end
if !old_title_draft && new_title_draft

View File

@ -4,12 +4,8 @@
- type_plural = _('project access tokens')
- @force_desktop_expanded_sidebar = true
.settings-section.js-search-settings-section
.settings-sticky-header
.settings-sticky-header-inner
%h4.gl-my-0
= page_title
%p.gl-text-secondary
= render ::Layouts::SettingsSectionComponent.new(page_title) do |c|
- c.with_description do
- help_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: help_page_path('user/project/settings/project_access_tokens.md') }
- if current_user.can?(:create_resource_access_tokens, @project)
= _('Generate project access tokens scoped to this project for your applications that need access to the GitLab API.')
@ -21,32 +17,32 @@
- link = link_to('', edit_group_path(root_group), target: '_blank', rel: 'noopener noreferrer')
= safe_format(_('You can enable project access token creation in %{link_start}group settings%{link_end}.'), tag_pair(link, :link_start, :link_end))
= html_escape(_('You can still use and manage existing tokens. %{link_start}Learn more.%{link_end}')) % { link_start: help_link_start, link_end: '</a>'.html_safe }
- c.with_body do
#js-new-access-token-app{ data: { access_token_type: type } }
#js-new-access-token-app{ data: { access_token_type: type } }
= render ::Layouts::CrudComponent.new(_('Active project access tokens'),
icon: 'token',
count: @active_access_tokens_size,
count_options: { class: 'js-token-count' },
form_options: { class: 'gl-hidden js-toggle-content js-add-new-token-form' },
options: { class: 'gl-mt-5 js-toggle-container js-token-card' }) do |c|
- c.with_actions do
- if current_user.can?(:create_resource_access_tokens, @project)
= render Pajamas::ButtonComponent.new(size: :small, button_options: { class: 'js-toggle-button js-toggle-content', data: { testid: 'add-new-token-button' } }) do
= _('Add new token')
= render ::Layouts::CrudComponent.new(_('Active project access tokens'),
icon: 'token',
count: @active_access_tokens_size,
count_options: { class: 'js-token-count' },
form_options: { class: 'gl-hidden js-toggle-content js-add-new-token-form' },
options: { class: 'gl-mt-5 js-toggle-container js-token-card' }) do |c|
- c.with_actions do
- if current_user.can?(:create_resource_access_tokens, @project)
= render Pajamas::ButtonComponent.new(size: :small, button_options: { class: 'js-toggle-button js-toggle-content', data: { testid: 'add-new-token-button' } }) do
= _('Add new token')
- c.with_form do
- if current_user.can?(:create_resource_access_tokens, @project)
= render_if_exists 'projects/settings/access_tokens/form', type: type
- c.with_form do
- if current_user.can?(:create_resource_access_tokens, @project)
= render_if_exists 'projects/settings/access_tokens/form', type: type
- c.with_body do
#js-access-token-table-app{ data: { access_token_type: type, access_token_type_plural: type_plural, backend_pagination: 'true', initial_active_access_tokens: @active_access_tokens.to_json, no_active_tokens_message: _('This project has no active access tokens.'), show_role: true } }
- c.with_body do
#js-access-token-table-app{ data: { access_token_type: type, access_token_type_plural: type_plural, backend_pagination: 'true', initial_active_access_tokens: @active_access_tokens.to_json, no_active_tokens_message: _('This project has no active access tokens.'), show_role: true } }
- if Feature.enabled?(:retain_resource_access_token_user_after_revoke, @project.root_ancestor)
.gl-mt-5
= render ::Layouts::CrudComponent.new(_('Inactive project access tokens'),
icon: 'token',
count: @inactive_access_tokens.size,
count_options: { class: 'js-token-count' }) do |c|
- c.with_body do
#js-inactive-access-token-table-app{ data: { access_token_type: type, access_token_type_plural: type_plural, initial_inactive_access_tokens: @inactive_access_tokens.to_json, no_inactive_tokens_message: _('This project has no inactive access tokens.')} }
- if Feature.enabled?(:retain_resource_access_token_user_after_revoke, @project.root_ancestor)
.gl-mt-5
= render ::Layouts::CrudComponent.new(_('Inactive project access tokens'),
icon: 'token',
count: @inactive_access_tokens.size,
count_options: { class: 'js-token-count' }) do |c|
- c.with_body do
#js-inactive-access-token-table-app{ data: { access_token_type: type, access_token_type_plural: type_plural, initial_inactive_access_tokens: @inactive_access_tokens.to_json, no_inactive_tokens_message: _('This project has no inactive access tokens.')} }

View File

@ -19,8 +19,6 @@ module MergeRequests
return
end
return unless Feature.enabled?(:merge_when_checks_pass, merge_request.project)
AutoMergeService.new(merge_request.project, merge_request.merge_user)
.process(merge_request)
end

View File

@ -1,8 +0,0 @@
---
name: merge_when_checks_pass
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121828
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/412995
milestone: '16.2'
type: beta
group: group::code review
default_enabled: true

View File

@ -21141,9 +21141,9 @@ A container repository.
| <a id="containerrepositoryname"></a>`name` | [`String!`](#string) | Name of the container repository. |
| <a id="containerrepositorypath"></a>`path` | [`String!`](#string) | Path of the container repository. |
| <a id="containerrepositoryproject"></a>`project` | [`Project!`](#project) | Project of the container registry. |
| <a id="containerrepositoryprotectionruleexists"></a>`protectionRuleExists` **{warning-solid}** | [`Boolean!`](#boolean) | **Introduced** in GitLab 17.2. **Status**: Experiment. Whether any matching container protection rule exists for this container. Available only when feature flag `container_registry_protected_containers` is enabled. |
| <a id="containerrepositoryprotectionruleexists"></a>`protectionRuleExists` **{warning-solid}** | [`Boolean!`](#boolean) | **Introduced** in GitLab 17.2. **Status**: Experiment. Whether any matching container protection rule exists for the container. Available only when feature flag `container_registry_protected_containers` is enabled. |
| <a id="containerrepositorystatus"></a>`status` | [`ContainerRepositoryStatus`](#containerrepositorystatus) | Status of the container repository. |
| <a id="containerrepositorytagscount"></a>`tagsCount` | [`Int!`](#int) | Number of tags associated with this image. |
| <a id="containerrepositorytagscount"></a>`tagsCount` | [`Int!`](#int) | Number of tags associated with the image. |
| <a id="containerrepositoryupdatedat"></a>`updatedAt` | [`Time!`](#time) | Timestamp when the container repository was updated. |
| <a id="containerrepositoryuserpermissions"></a>`userPermissions` | [`ContainerRepositoryPermissions!`](#containerrepositorypermissions) | Permissions for the current user on the resource. |
@ -21166,10 +21166,10 @@ Details of a container repository.
| <a id="containerrepositorydetailsname"></a>`name` | [`String!`](#string) | Name of the container repository. |
| <a id="containerrepositorydetailspath"></a>`path` | [`String!`](#string) | Path of the container repository. |
| <a id="containerrepositorydetailsproject"></a>`project` | [`Project!`](#project) | Project of the container registry. |
| <a id="containerrepositorydetailsprotectionruleexists"></a>`protectionRuleExists` **{warning-solid}** | [`Boolean!`](#boolean) | **Introduced** in GitLab 17.2. **Status**: Experiment. Whether any matching container protection rule exists for this container. Available only when feature flag `container_registry_protected_containers` is enabled. |
| <a id="containerrepositorydetailsprotectionruleexists"></a>`protectionRuleExists` **{warning-solid}** | [`Boolean!`](#boolean) | **Introduced** in GitLab 17.2. **Status**: Experiment. Whether any matching container protection rule exists for the container. Available only when feature flag `container_registry_protected_containers` is enabled. |
| <a id="containerrepositorydetailssize"></a>`size` | [`Float`](#float) | Deduplicated size of the image repository in bytes. This is only available on GitLab.com for repositories created after `2021-11-04`. |
| <a id="containerrepositorydetailsstatus"></a>`status` | [`ContainerRepositoryStatus`](#containerrepositorystatus) | Status of the container repository. |
| <a id="containerrepositorydetailstagscount"></a>`tagsCount` | [`Int!`](#int) | Number of tags associated with this image. |
| <a id="containerrepositorydetailstagscount"></a>`tagsCount` | [`Int!`](#int) | Number of tags associated with the image. |
| <a id="containerrepositorydetailsupdatedat"></a>`updatedAt` | [`Time!`](#time) | Timestamp when the container repository was updated. |
| <a id="containerrepositorydetailsuserpermissions"></a>`userPermissions` | [`ContainerRepositoryPermissions!`](#containerrepositorypermissions) | Permissions for the current user on the resource. |

View File

@ -540,7 +540,7 @@ For example:
```ruby
field :tags,
Types::ContainerRepositoryTagType.connection_type,
Types::ContainerRegistry::ContainerRepositoryTagType.connection_type,
null: true,
description: 'Tags of the container repository',
max_page_size: 20
@ -2201,19 +2201,21 @@ Also see the [description style guide for sort enums](#sort-enums).
Example from [`ContainerRepositoriesResolver`](https://gitlab.com/gitlab-org/gitlab/-/blob/dad474605a06c8ed5404978b0a9bd187e9fded80/app/graphql/resolvers/container_repositories_resolver.rb#L13-16):
```ruby
# Types::ContainerRepositorySortEnum:
# Types::ContainerRegistry::ContainerRepositorySortEnum:
module Types
class ContainerRepositorySortEnum < SortEnum
graphql_name 'ContainerRepositorySort'
description 'Values for sorting container repositories'
module ContainerRegistry
class ContainerRepositorySortEnum < SortEnum
graphql_name 'ContainerRepositorySort'
description 'Values for sorting container repositories'
value 'NAME_ASC', 'Name by ascending order.', value: :name_asc
value 'NAME_DESC', 'Name by descending order.', value: :name_desc
value 'NAME_ASC', 'Name by ascending order.', value: :name_asc
value 'NAME_DESC', 'Name by descending order.', value: :name_desc
end
end
end
# Resolvers::ContainerRepositoriesResolver:
argument :sort, Types::ContainerRepositorySortEnum,
argument :sort, Types::ContainerRegistry::ContainerRepositorySortEnum,
description: 'Sort container repositories by this criteria.',
required: false,
default_value: :created_desc

View File

@ -8,17 +8,27 @@ info: Any user with at least the Maintainer role can merge updates to this conte
GitLab supports native Emojis through the [`tanuki_emoji`](https://gitlab.com/gitlab-org/ruby/gems/tanuki_emoji) gem.
NOTE:
[`tanuki_emoji`](https://gitlab.com/gitlab-org/ruby/gems/tanuki_emoji) gem has replaced [`gemojione`](https://github.com/bonusly/gemojione). See [more information here](https://gitlab.com/gitlab-org/gitlab/-/issues/429653#note_1931385720).
## How to update Emojis
1. Update the [`tanuki_emoji`](https://gitlab.com/gitlab-org/ruby/gems/tanuki_emoji) gem.
Because our emoji support is implemented on both the backend and the frontend, we need to update support over three milestones.
### First milestone (backend)
1. Update the [`tanuki_emoji`](https://gitlab.com/gitlab-org/ruby/gems/tanuki_emoji) gem as needed.
1. Update the `Gemfile` to use the latest `tanuki_emoji` gem.
1. Update the `Gemfile` to use the latest [`unicode-emoji`](https://github.com/janlelis/unicode-emoji) that supports the version of Unicode you're upgrading to.
1. Update `EMOJI_VERSION` in `lib/gitlab/emoji.rb`
1. `bundle exec rake tanuki_emoji:import` - imports all fallback images into the versioned `public/-/emojis` directory.
Ensure you see new individual images copied into there.
1. When testing, you should be able to use the shortcodes of any new emojis and have them display.
1. See example MRs [one](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/171446) and
[two](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/170289) for the backend.
### Second milestone (frontend)
1. Update `EMOJI_VERSION` in `app/assets/javascripts/emoji/index.js`
1. Use the [`tanuki_emoji`](https://gitlab.com/gitlab-org/ruby/gems/tanuki_emoji) gem's [Rake tasks](../rake_tasks.md) to update aliases, fallback images, digests, and sprites. Run in the following order:
1. Use the [`tanuki_emoji`](https://gitlab.com/gitlab-org/ruby/gems/tanuki_emoji) gem's [Rake tasks](../rake_tasks.md) to update aliases, digests, and sprites. Run in the following order:
1. `bundle exec rake tanuki_emoji:aliases` - updates `fixtures/emojis/aliases.json`
1. `bundle exec rake tanuki_emoji:import` - imports all the images into `public/-/emojis` directory
1. `bundle exec rake tanuki_emoji:digests` - updates `public/-/emojis/VERSION/emojis.json` and `fixtures/emojis/digests.json`
1. `bundle exec rake tanuki_emoji:sprite` - creates new sprite sheets
@ -36,17 +46,20 @@ NOTE:
- Positive intent should be set to `0.5`.
- Neutral intent can be set to `1`. This is applied to all emoji automatically so there is no need to set this explicitly.
- Negative intent should be set to `1.5`.
1. Ensure you see new individual images copied into `app/assets/images/emoji/`
1. Ensure you can see the new emojis and their aliases in the GitLab Flavored Markdown (GLFM) Autocomplete
1. Ensure you can see the new emojis and their aliases in the emoji reactions menu
1. You might need to add new emoji Unicode support checks and rules for platforms
that do not support a certain emoji and we need to fallback to an image.
See `app/assets/javascripts/emoji/support/is_emoji_unicode_supported.js`
and `app/assets/javascripts/emoji/support/unicode_support_map.js`
- if a new version of Unicode emojis is being added, update the list in `app/assets/javascripts/emoji/support/unicode_support_map.js`
1. Ensure you use the version of [emoji-regex](https://github.com/mathiasbynens/emoji-regex) that corresponds
to the version of Unicode that is being supported. This should be updated in `package.json`. Used for
filtering emojis in `app/assets/javascripts/emoji/index.js`.
1. Have there been any changes to the category names? If so then `app/assets/javascripts/emoji/constants.js`
will need to be updated
1. See an [example MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/166790)
1. When testing
1. Ensure you can see the new emojis and their aliases in the GitLab Flavored Markdown (GLFM) Autocomplete
1. Ensure you can see the new emojis and their aliases in the emoji reactions menu
### Third milestone (cleanup)
Remove any old emoji versions from the `public/-/emojis` directory. This is not strictly necessary -
everything continues to work if you don't do this. However it's good to clean it up.

View File

@ -51,6 +51,7 @@ other scanners) during a scan could cause inaccurate results.
The following projects demonstrate API security testing scanning:
- [Example OpenAPI v3 Specification project](https://gitlab.com/gitlab-org/security-products/demos/api-dast/openapi-v3-example)
- [Example OpenAPI v2 Specification project](https://gitlab.com/gitlab-org/security-products/demos/api-dast/openapi-example)
- [Example HTTP Archive (HAR) project](https://gitlab.com/gitlab-org/security-products/demos/api-dast/har-example)
- [Example Postman Collection project](https://gitlab.com/gitlab-org/security-products/demos/api-dast/postman-example)

View File

@ -118,6 +118,26 @@ When enforcing pipeline execution policies over projects whose CI/CD configurati
control, you should define jobs in the `.pipeline-policy-pre` and `.pipeline-policy-post` stages.
These stages are always available, regardless of any project's CI/CD configuration.
When you use the `override_project_ci` [pipeline strategy](#pipeline-strategies) with multiple
pipeline execution policies and with custom stages, the stages must be defined in the same relative order
to be compatible with each other:
Valid configuration example:
```plaintext
- `override-policy-1` stages: `[build, test, policy-test, deploy]`
- `override-policy-2` stages: `[test, deploy]`
```
Invalid configuration example:
```plaintext
- `override-policy-1` stages: `[build, test, policy-test, deploy]`
- `override-policy-2` stages: `[deploy, test]`
```
The pipeline fails if one or more `override_project_ci` policies have an invalid `stages` configuration.
### `content` type
| Field | Type | Required | Description |
@ -199,12 +219,13 @@ compliance_job:
...
```
NOTE:
Jobs from the project configuration that are defined for a custom
`stage` are excluded from the final pipeline.
To include a job in the final configuration, define it for a
[default pipeline stage](../../../ci/yaml/index.md#stages) or a reserved
stage (`.pipeline-policy-pre` or `.pipeline-policy-post`).
> Jobs from the project configuration that are defined for a custom
> `stage` are excluded from the final pipeline.
> To include a job in the final configuration, you can:
>
> - Use [stages](../../../ci/yaml/index.md#stages) to define custom stages in the pipeline execution policy configuration.
> - Use a [default pipeline stage](../../../ci/yaml/index.md#stages)
> - Use a reserved stage (`.pipeline-policy-pre` or `.pipeline-policy-post`).
## CI/CD variables

View File

@ -17,8 +17,9 @@ DETAILS:
> - [Enabled](https://gitlab.com/gitlab-org/gitlab/-/issues/412995) the flags `merge_when_checks_pass` and `additional_merge_when_checks_ready` on GitLab.com in GitLab 17.0.
> - [Merged](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/154366) the flag `additional_merge_when_checks_ready` with `merge_when_checks_pass` in GitLab 17.1.
> - [Enabled](https://gitlab.com/gitlab-org/gitlab/-/issues/412995) the flags `merge_when_checks_pass` by default in GitLab 17.4.
> - Merge when checks pass [generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/412995) in GitLab 17.7. Feature flag `merge_when_checks_pass` removed.
When you enable the `merge_when_checks_pass` feature flag, if the content of a merge request is ready to merge,
If the content of a merge request is ready to merge,
you can select **Set to auto-merge**. The merge request auto-merges when all required checks complete successfully, and you don't need to remember to manually merge the merge request.
After you set auto-merge, these checks must all pass before the merge request merges:

View File

@ -0,0 +1,83 @@
# frozen_string_literal: true

module Gitlab
  module Database
    module Partitioning
      module Time
        # Time-based partitioning strategy that keeps one partition per day,
        # maintaining HEADROOM days of future partitions and (optionally)
        # pruning partitions older than the retention period.
        class DailyStrategy < BaseStrategy
          # How far into the future partitions are pre-created.
          HEADROOM = 28.days

          # strftime suffix appended to the table name for each partition.
          PARTITION_SUFFIX = '%Y%m%d'

          # Partitions that currently exist in the database for this table.
          def current_partitions
            Gitlab::Database::PostgresPartition.for_parent_table(table_name).map do |partition|
              TimePartition.from_sql(table_name, partition.name, partition.condition)
            end
          end

          # Check the currently existing partitions and determine which ones are missing
          def missing_partitions
            desired_partitions - current_partitions
          end

          # Partitions that exist but are no longer desired (e.g. past the
          # retention window). Non-empty partitions are kept when
          # retain_non_empty_partitions is set.
          def extra_partitions
            partitions = current_partitions - desired_partitions
            partitions.reject!(&:holds_data?) if retain_non_empty_partitions

            partitions
          end

          # The full set of partitions that should exist: a catch-all lower
          # partition (unless old partitions are pruned), then one partition
          # per day up to today + HEADROOM.
          def desired_partitions
            [].tap do |parts|
              min_date, max_date = relevant_range

              if pruning_old_partitions? && min_date <= oldest_active_date
                min_date = oldest_active_date.beginning_of_day.to_date
              else
                # Catch-all partition from MINVALUE up to the first bound.
                parts << partition_for(upper_bound: min_date)
              end

              while min_date < max_date
                next_date = min_date.next_day

                parts << partition_for(lower_bound: min_date, upper_bound: next_date)

                min_date = next_date
              end
            end
          end

          # [min_date, max_date] covered by the desired partitions.
          def relevant_range
            first_partition = current_partitions.min
            if first_partition
              # Case 1: First partition starts with MINVALUE, i.e. from is nil -> start with first real partition
              # Case 2: Rather unexpectedly, first partition does not start with MINVALUE, i.e. from is not nil
              #         In this case, use first partition beginning as a start
              min_date = first_partition.from || first_partition.to
            end

            min_date ||= oldest_active_date if pruning_old_partitions?

            # In case we don't have a partition yet
            min_date ||= Date.current
            min_date = min_date.beginning_of_day.to_date

            max_date = Date.current.end_of_day.to_date + HEADROOM

            [min_date, max_date]
          end

          # Earliest date still inside the retention period.
          def oldest_active_date
            retain_for.ago.beginning_of_day.to_date
          end

          # Name for the partition starting at lower_bound; a nil bound
          # denotes the catch-all (MINVALUE) partition.
          def partition_name(lower_bound)
            suffix = lower_bound&.strftime(PARTITION_SUFFIX) || '00000000'

            "#{table_name}_#{suffix}"
          end
        end
      end
    end
  end
end

View File

@ -57754,12 +57754,6 @@ msgid_plural "Todos|Marked %d to-dos as done"
msgstr[0] ""
msgstr[1] ""
msgid "Todos|Marked as done"
msgstr ""
msgid "Todos|Marked as undone"
msgstr ""
msgid "Todos|Member access request"
msgstr ""

View File

@ -397,7 +397,6 @@ spec/frontend/vue_shared/components/page_heading_spec.js
spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
spec/frontend/vue_shared/components/smart_virtual_list_spec.js
spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js
spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js
spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js
spec/frontend/vue_shared/directives/tooltip_on_truncate_spec.js

View File

@ -662,18 +662,6 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
let(:status) { 'merge_when_checks_pass' }
let(:not_current_pipeline_status) { 'merge_when_checks_pass' }
end
context 'when merge_when_checks_pass is false' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it_behaves_like 'api merge with auto merge' do
let(:service_class) { AutoMerge::MergeWhenPipelineSucceedsService }
let(:status) { 'merge_when_pipeline_succeeds' }
let(:not_current_pipeline_status) { 'failed' }
end
end
end
describe 'only_allow_merge_if_all_discussions_are_resolved? setting' do

View File

@ -137,7 +137,9 @@ FactoryBot.define do
auto_merge_enabled { true }
auto_merge_strategy { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
merge_user { author }
merge_params { { sha: diff_head_sha } }
merge_params do
{ 'auto_merge_strategy' => AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS, sha: diff_head_sha }
end
end
trait :merge_when_checks_pass do

View File

@ -30,11 +30,7 @@ RSpec.describe 'Merge request > User resolves Draft', :js, feature_category: :co
end
context 'when there is active pipeline for merge request' do
let(:feature_flags_state) { true }
before do
stub_feature_flags(merge_when_checks_pass: feature_flags_state)
create(:ci_build, pipeline: pipeline)
sign_in(user)
@ -58,27 +54,5 @@ RSpec.describe 'Merge request > User resolves Draft', :js, feature_category: :co
expect(page.find('.ci-widget-content', wait: 0)).to have_content("Pipeline ##{pipeline.id}")
expect(page).to have_content("Set to auto-merge")
end
context 'when the new merge_when_checks_pass and merge blocked components are disabled' do
let(:feature_flags_state) { false }
it 'retains merge request data after clicking Resolve WIP status' do
expect(page.find('.ci-widget-content')).to have_content("Pipeline ##{pipeline.id}")
expect(page).to have_content "Merge blocked: 1 check failed"
expect(page).to have_content "Merge request must not be draft"
page.within('.mr-state-widget') do
click_button('Mark as ready')
end
wait_for_requests
# If we don't disable the wait here, the test will wait until the
# merge request widget refreshes, which masks missing elements
# that should already be present.
expect(page.find('.ci-widget-content', wait: 0)).to have_content("Pipeline ##{pipeline.id}")
expect(page).not_to have_content("Merge blocked")
end
end
end
end

View File

@ -331,34 +331,19 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
visit project_merge_request_path(project_only_mwps, merge_request_in_only_mwps_project)
end
context 'when using merge when pipeline succeeds' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'is not allowed to set auto merge' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
it 'is not allowed to merge' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).not_to have_selector('.accept-merge-request')
end
end
context 'when using merge when checks pass' do
it 'is not allowed to set auto merge' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).to have_selector('.accept-merge-request')
end
expect(page).to have_selector('.accept-merge-request')
end
end
context 'view merge request with MWPS enabled but automatically merge fails' do
context 'view merge request with auto merge enabled but automatically merge fails' do
before do
merge_request.update!(
auto_merge_enabled: true,
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS,
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS,
merge_user: merge_request.author,
merge_error: 'Something went wrong'
)
@ -376,7 +361,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
end
end
context 'view merge request with MWPS enabled but automatically merge fails' do
context 'view merge request with auto merge enabled but automatically merge fails' do
before do
merge_request.update!(
merge_when_pipeline_succeeds: true,

View File

@ -16,9 +16,6 @@ describe('TodoItemActions', () => {
todo: mockTodo,
...props,
},
provide: {
currentTab: 0,
},
});
};
@ -43,13 +40,6 @@ describe('TodoItemActions', () => {
});
describe('tooltipTitle', () => {
it('returns null when isLoading is true', () => {
createComponent();
// eslint-disable-next-line no-restricted-syntax
wrapper.setData({ isLoading: true });
expect(wrapper.vm.tooltipTitle).toBeNull();
});
it('returns "Mark as done" for pending todo', () => {
createComponent();
expect(wrapper.vm.tooltipTitle).toBe('Mark as done');

View File

@ -21,6 +21,9 @@ describe('TodoItem', () => {
},
...props,
},
provide: {
currentTab: 0,
},
});
};
@ -49,16 +52,23 @@ describe('TodoItem', () => {
expect(wrapper.findComponent(TodoItemActions).exists()).toBe(true);
});
describe('state based style', () => {
it('applies background when todo is done', () => {
createComponent({ todo: { state: TODO_STATE_DONE } });
expect(wrapper.attributes('class')).toContain('gl-bg-subtle');
});
it('applies no background when todo is pending', () => {
createComponent({ todo: { state: TODO_STATE_PENDING } });
expect(wrapper.attributes('class')).not.toContain('gl-bg-subtle');
});
});
describe('computed properties', () => {
it('isDone returns true when todo state is done', () => {
createComponent({ todo: { state: TODO_STATE_DONE } });
expect(wrapper.vm.isDone).toBe(true);
});
it('isPending returns true when todo state is pending', () => {
createComponent({ todo: { state: TODO_STATE_PENDING } });
expect(wrapper.vm.isPending).toBe(true);
});
});
it('emits change event when TodoItemActions emits change', async () => {

View File

@ -41,6 +41,7 @@ describe('TodosApp', () => {
const findMarkAllDoneButton = () => wrapper.findComponent(TodosMarkAllDoneButton);
const findRefreshButton = () => wrapper.findByTestId('refresh-todos');
const findPendingTodosCount = () => wrapper.findByTestId('pending-todos-count');
const findTodoItemListContainer = () => wrapper.findByTestId('todo-item-list-container');
it('should have a tracking event for each tab', () => {
expect(STATUS_BY_TAB.length).toBe(INSTRUMENT_TAB_LABELS.length);
@ -145,6 +146,61 @@ describe('TodosApp', () => {
expect(todosCountsQuerySuccessHandler).toHaveBeenCalledTimes(2);
});
it('refetches todos one second after the cursor leaves the list of todos', async () => {
jest.useFakeTimers();
createComponent();
// Wait and account for initial query
await waitForPromises();
expect(todosQuerySuccessHandler).toHaveBeenCalledTimes(1);
expect(todosCountsQuerySuccessHandler).toHaveBeenCalledTimes(1);
// Simulate interacting with a todo item then mousing out of the list zone
wrapper.vm.handleItemChanged(1, true);
const list = findTodoItemListContainer();
list.trigger('mouseleave');
// Should refresh the count, but not the list
await waitForPromises();
expect(todosQuerySuccessHandler).toHaveBeenCalledTimes(1);
expect(todosCountsQuerySuccessHandler).toHaveBeenCalledTimes(2);
// Run out the clock
jest.advanceTimersByTime(1000 + 50); // 1s + some jitter
// Refreshes the count and the list
await waitForPromises();
expect(todosQuerySuccessHandler).toHaveBeenCalledTimes(2);
expect(todosCountsQuerySuccessHandler).toHaveBeenCalledTimes(3);
});
it('does not refresh todos after the cursor leaves the list of todos if nothing changed', async () => {
jest.useFakeTimers();
createComponent();
// Wait and account for initial query
await waitForPromises();
expect(todosQuerySuccessHandler).toHaveBeenCalledTimes(1);
expect(todosCountsQuerySuccessHandler).toHaveBeenCalledTimes(1);
// Simulate NOT interacting with a todo item then mousing out of the list zone
const list = findTodoItemListContainer();
list.trigger('mouseleave');
// Should not update anything
await waitForPromises();
expect(todosQuerySuccessHandler).toHaveBeenCalledTimes(1);
expect(todosCountsQuerySuccessHandler).toHaveBeenCalledTimes(1);
// Run out the clock
jest.advanceTimersByTime(1000 + 50); // 1s + some jitter
// Should not update anything
await waitForPromises();
expect(todosQuerySuccessHandler).toHaveBeenCalledTimes(1);
expect(todosCountsQuerySuccessHandler).toHaveBeenCalledTimes(1);
});
it('passes the default status to the filter bar', () => {
createComponent();

View File

@ -94,23 +94,6 @@ RSpec.describe Mutations::MergeRequests::Accept, feature_category: :api do
end
expect(result).to include(errors: be_empty, merge_request: be_auto_merge_enabled)
end
context 'when merge_when_checks_pass is off' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
let(:merge_request) { create(:merge_request, :with_head_pipeline, source_project: project) }
let(:strategy) { ::Types::MergeStrategyEnum.values['MERGE_WHEN_PIPELINE_SUCCEEDS'].value }
let(:additional_args) { { auto_merge_strategy: strategy } }
it "can use the MERGE_WHEN_PIPELINE_SUCCEEDS strategy" do
expect_next_found_instance_of(MergeRequest) do |instance|
expect(instance).not_to receive(:merge_async)
end
expect(result).to include(errors: be_empty, merge_request: be_auto_merge_enabled)
end
end
end
end
end

View File

@ -2,12 +2,12 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositoryCleanupStatus'] do
RSpec.describe GitlabSchema.types['ContainerRepositoryCleanupStatus'], feature_category: :container_registry do
it 'exposes all statuses' do
expected_keys = ContainerRepository.expiration_policy_cleanup_statuses
.keys
.map { |k| k.gsub('cleanup_', '') }
.map(&:upcase)
expect(described_class.values.keys).to contain_exactly(*expected_keys)
expect(described_class.values.keys).to match_array(expected_keys)
end
end

View File

@ -4,9 +4,9 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositoryDetails'], feature_category: :container_registry do
fields = %i[id name path location created_at updated_at expiration_policy_started_at
status tags_count expiration_policy_cleanup_status tags size manifest
project migration_state last_cleanup_deleted_tags_count user_permissions last_published_at
protection_rule_exists]
status tags_count expiration_policy_cleanup_status tags size manifest
project migration_state last_cleanup_deleted_tags_count user_permissions last_published_at
protection_rule_exists]
it { expect(described_class.graphql_name).to eq('ContainerRepositoryDetails') }
@ -20,7 +20,7 @@ RSpec.describe GitlabSchema.types['ContainerRepositoryDetails'], feature_categor
subject { described_class.fields['tags'] }
it 'returns tags connection type' do
is_expected.to have_graphql_type(Types::ContainerRepositoryTagType.connection_type)
is_expected.to have_graphql_type(Types::ContainerRegistry::ContainerRepositoryTagType.connection_type)
end
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositorySort'] do
RSpec.describe GitlabSchema.types['ContainerRepositorySort'], feature_category: :container_registry do
specify { expect(described_class.graphql_name).to eq('ContainerRepositorySort') }
it_behaves_like 'common sort values'

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositoryStatus'], feature_category: :container_registry do
it 'exposes all statuses' do
expect(described_class.values.keys).to match_array(ContainerRepository.statuses.keys.map(&:upcase))
end
end

View File

@ -6,8 +6,8 @@ RSpec.describe GitlabSchema.types['ContainerRepository'], feature_category: :con
include GraphqlHelpers
fields = %i[id name path location created_at updated_at expiration_policy_started_at
status tags_count expiration_policy_cleanup_status project
migration_state last_cleanup_deleted_tags_count user_permissions protection_rule_exists]
status tags_count expiration_policy_cleanup_status project
migration_state last_cleanup_deleted_tags_count user_permissions protection_rule_exists]
it { expect(described_class.graphql_name).to eq('ContainerRepository') }
@ -23,7 +23,7 @@ RSpec.describe GitlabSchema.types['ContainerRepository'], feature_category: :con
subject { described_class.fields['status'] }
it 'returns status enum' do
is_expected.to have_graphql_type(Types::ContainerRepositoryStatusEnum)
is_expected.to have_graphql_type(Types::ContainerRegistry::ContainerRepositoryStatusEnum)
end
end
@ -31,7 +31,7 @@ RSpec.describe GitlabSchema.types['ContainerRepository'], feature_category: :con
subject { described_class.fields['expirationPolicyCleanupStatus'] }
it 'returns cleanup status enum' do
is_expected.to have_graphql_type(Types::ContainerRepositoryCleanupStatusEnum)
is_expected.to have_graphql_type(Types::ContainerRegistry::ContainerRepositoryCleanupStatusEnum)
end
end

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositoryTagSort'] do
RSpec.describe GitlabSchema.types['ContainerRepositoryTagSort'], feature_category: :container_registry do
specify { expect(described_class.graphql_name).to eq('ContainerRepositoryTagSort') }
it 'exposes all the existing issue sort values' do

View File

@ -1,9 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSchema.types['ContainerRepositoryStatus'] do
it 'exposes all statuses' do
expect(described_class.values.keys).to contain_exactly(*ContainerRepository.statuses.keys.map(&:upcase))
end
end

View File

@ -164,8 +164,8 @@ RSpec.describe GitlabSchema.types['MergeRequest'], feature_category: :code_revie
end
end
context 'when MR is set to merge when pipeline succeeds' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds, target_project: project, source_project: project) }
context 'when MR is set to auto merge' do
let(:merge_request) { create(:merge_request, :merge_when_checks_pass, target_project: project, source_project: project) }
it 'is not nil' do
value = resolve_field(:merge_user, merge_request)

View File

@ -119,7 +119,7 @@ RSpec.describe GitlabSchema.types['Query'], feature_category: :shared do
describe 'container_repository field' do
subject { described_class.fields['containerRepository'] }
it { is_expected.to have_graphql_type(Types::ContainerRepositoryDetailsType) }
it { is_expected.to have_graphql_type(Types::ContainerRegistry::ContainerRepositoryDetailsType) }
end
describe 'package field' do

View File

@ -0,0 +1,346 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::Partitioning::Time::DailyStrategy, feature_category: :database do
let(:connection) { ApplicationRecord.connection }
let(:daily_strategy) do
described_class.new(model, partitioning_key, retain_for: retention_period, retain_non_empty_partitions: retain_data)
end
let(:retention_period) { nil }
let(:retain_data) { false }
let(:partitioning_key) { :created_at }
let(:table_name) { model.table_name }
let(:model) do
Class.new(ApplicationRecord) do
self.table_name = '_test_partitioned_test'
self.primary_key = :id
end
end
describe '#current_partitions' do
subject(:current_partitions) { daily_strategy.current_partitions }
before do
connection.execute(<<~SQL)
CREATE TABLE #{table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_00000000
PARTITION OF #{table_name}
FOR VALUES FROM (MINVALUE) TO ('2020-05-01');
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_20200501
PARTITION OF #{table_name}
FOR VALUES FROM ('2020-05-01') TO ('2020-05-02');
SQL
end
it 'detects both partitions' do
expect(current_partitions).to eq(
[
time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000"),
time_partition(table_name, '2020-05-01', '2020-05-02', "#{model.table_name}_20200501")
])
end
end
describe '#missing_partitions', time_travel_to: '2020-05-04' do
subject(:missing_partitions) { daily_strategy.missing_partitions }
context 'with existing partitions' do
before do
connection.execute(<<~SQL)
CREATE TABLE #{table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_00000000
PARTITION OF #{table_name}
FOR VALUES FROM (MINVALUE) TO ('2020-05-01');
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_20200502
PARTITION OF #{table_name}
FOR VALUES FROM ('2020-05-02') TO ('2020-05-03');
SQL
# Insert some data, it doesn't make a difference
model.create!(created_at: Date.parse('2020-04-15'))
model.create!(created_at: Date.parse('2020-05-02'))
end
context 'when pruning partitions before 2020-05-02' do
let(:retention_period) { 1.day }
it 'does not include the missing partition from 2020-05-02 because it would be dropped' do
expect(missing_partitions).not_to include(
time_partition(table_name, '2020-05-01', '2020-05-02', "#{model.table_name}_20200501")
)
end
it 'detects the missing partition for 1 day ago (2020-05-03)' do
expect(missing_partitions).to include(
time_partition(table_name, '2020-05-03', '2020-05-04', "#{model.table_name}_20200503")
)
end
end
it 'detects the gap and the missing partition for 2020-05-01' do
expect(missing_partitions).to include(
time_partition(table_name, '2020-05-01', '2020-05-02', "#{model.table_name}_20200501")
)
end
it 'detects the missing partitions at the end of the range and expects a partition for 2020-05-03' do
expect(missing_partitions).to include(
time_partition(table_name, '2020-05-03', '2020-05-04', "#{model.table_name}_20200503")
)
end
it 'detects the missing partitions at the end of the range and expects a partition for 2020-05-05' do
expect(missing_partitions).to include(
time_partition(model.table_name, '2020-05-05', '2020-05-06', "#{model.table_name}_20200505")
)
end
it 'creates partitions 7 days out from now (2020-05-04 to 2020-05-10)' do
expect(missing_partitions).to include(
time_partition(table_name, '2020-05-04', '2020-05-05', "#{model.table_name}_20200504"),
time_partition(table_name, '2020-05-05', '2020-05-06', "#{model.table_name}_20200505"),
time_partition(table_name, '2020-05-06', '2020-05-07', "#{model.table_name}_20200506"),
time_partition(table_name, '2020-05-07', '2020-05-08', "#{model.table_name}_20200507"),
time_partition(table_name, '2020-05-08', '2020-05-09', "#{model.table_name}_20200508"),
time_partition(table_name, '2020-05-09', '2020-05-10', "#{model.table_name}_20200509"),
time_partition(table_name, '2020-05-10', '2020-05-11', "#{model.table_name}_20200510")
)
end
it 'detects all missing partitions' do
expect(missing_partitions.size).to eq(30)
end
end
context 'without existing partitions' do
before do
connection.execute(<<~SQL)
CREATE TABLE #{table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
SQL
end
context 'when pruning partitions before 2020-05-02' do
let(:retention_period) { 1.day }
it 'detects exactly the set of partitions from 2020-05-03 to 2020-05-31' do
days = (Date.parse('2020-05-03')..Date.parse('2020-06-01')).map(&:to_s)
expected = days[..-2].zip(days.drop(1)).map do |(from, to)|
partition_name = "#{model.table_name}_#{Date.parse(from).strftime('%Y%m%d')}"
time_partition(model.table_name, from, to, partition_name)
end
expect(missing_partitions).to match_array(expected)
end
end
it 'detects the missing catch-all partition at the beginning' do
expect(missing_partitions).to include(
time_partition(table_name, nil, '2020-05-04', "#{model.table_name}_00000000")
)
end
it 'detects the missing partition for today and expects a partition for 2020-05-04' do
expect(missing_partitions).to include(
time_partition(table_name, '2020-05-04', '2020-05-05', "#{model.table_name}_20200504")
)
end
it 'creates partitions 7 days out from now (2020-05-04 through 2020-05-10)' do
expect(missing_partitions).to include(
time_partition(table_name, '2020-05-04', '2020-05-05', "#{model.table_name}_20200504"),
time_partition(table_name, '2020-05-05', '2020-05-06', "#{model.table_name}_20200505"),
time_partition(table_name, '2020-05-06', '2020-05-07', "#{model.table_name}_20200506"),
time_partition(table_name, '2020-05-07', '2020-05-08', "#{model.table_name}_20200507"),
time_partition(table_name, '2020-05-08', '2020-05-09', "#{model.table_name}_20200508"),
time_partition(table_name, '2020-05-09', '2020-05-10', "#{model.table_name}_20200509"),
time_partition(table_name, '2020-05-10', '2020-05-11', "#{model.table_name}_20200510")
)
end
it 'detects all missing partitions' do
expect(missing_partitions.size).to eq(29)
end
end
context 'with a regular partition but no catchall (MINVALUE, to) partition' do
before do
connection.execute(<<~SQL)
CREATE TABLE #{table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_20200501
PARTITION OF #{table_name}
FOR VALUES FROM ('2020-05-01') TO ('2020-05-02');
SQL
end
it 'detects a missing catch-all partition to add before the existing partition' do
expect(missing_partitions).to include(
time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000")
)
end
end
end
describe '#extra_partitions', time_travel_to: '2020-05-04' do
subject(:extra_partitions) { daily_strategy.extra_partitions }
describe 'with existing partitions' do
before do
ActiveRecord::Base.connection.execute(<<~SQL)
CREATE TABLE #{table_name}
(id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at))
PARTITION BY RANGE (created_at);
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_00000000
PARTITION OF #{table_name}
FOR VALUES FROM (MINVALUE) TO ('2020-05-01');
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_20200501
PARTITION OF #{table_name}
FOR VALUES FROM ('2020-05-01') TO ('2020-05-02');
CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_20200502
PARTITION OF #{table_name}
FOR VALUES FROM ('2020-05-02') TO ('2020-05-03')
SQL
end
context 'without a time retention policy' do
it 'has no extra partitions to prune' do
expect(extra_partitions).to be_empty
end
end
context 'with a time retention policy that excludes no partitions' do
let(:retention_period) { 4.days }
it 'has no extra partitions to prune' do
expect(extra_partitions).to be_empty
end
end
context 'with a time retention policy of 3 days' do
let(:retention_period) { 3.days }
it 'prunes the unbounded partition ending 2020-05-01' do
min_value = time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000")
expect(extra_partitions).to contain_exactly(min_value)
end
end
context 'with a time retention policy of 2 days' do
let(:retention_period) { 2.days }
it 'prunes the unbounded partition and the partition for min value to 2020-05-01' do
expect(extra_partitions).to contain_exactly(
time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000"),
time_partition(table_name, '2020-05-01', '2020-05-02', "#{model.table_name}_20200501")
)
end
context 'when the retain_non_empty_partitions is true' do
let(:retain_data) { true }
it 'prunes empty partitions' do
expect(extra_partitions).to contain_exactly(
time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000"),
time_partition(table_name, '2020-05-01', '2020-05-02', "#{model.table_name}_20200501")
)
end
it 'does not prune non-empty partitions' do
# inserting one record into _test_partitioned_test_20200501
connection.execute("INSERT INTO #{table_name} (created_at) VALUES (('2020-05-01'))")
expect(extra_partitions).to contain_exactly(
time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000")
)
end
end
end
context 'with a time retention policy of 1 day' do
let(:retention_period) { 1.day }
it 'prunes the unbounded partition and the partitions for 2020-05-01 and 2020-05-02' do
expect(extra_partitions).to contain_exactly(
time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000"),
time_partition(table_name, '2020-05-01', '2020-05-02', "#{model.table_name}_20200501"),
time_partition(table_name, '2020-05-02', '2020-05-03', "#{model.table_name}_20200502")
)
end
context 'when the retain_non_empty_partitions is true' do
let(:retain_data) { true }
it 'prunes empty partitions' do
expect(extra_partitions).to contain_exactly(
time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000"),
time_partition(table_name, '2020-05-01', '2020-05-02', "#{model.table_name}_20200501"),
time_partition(table_name, '2020-05-02', '2020-05-03', "#{model.table_name}_20200502")
)
end
it 'does not prune non-empty partitions' do
# inserting one record into _test_partitioned_test_20200501
connection.execute("INSERT INTO #{table_name} (created_at) VALUES (('2020-05-01'))")
expect(extra_partitions).to contain_exactly(
time_partition(table_name, nil, '2020-05-01', "#{model.table_name}_00000000"),
time_partition(table_name, '2020-05-02', '2020-05-03', "#{model.table_name}_20200502")
)
end
end
end
end
end
describe '#partition_name' do
let(:from) { Date.parse('2020-05-01 00:00:00') }
let(:to) { Date.parse('2020-05-02 00:00:00') }
subject(:partition_name) { daily_strategy.partition_name(from) }
it 'uses table_name as prefix' do
expect(partition_name).to start_with(table_name)
end
it 'uses Year-Month-Day (from) as suffix' do
expect(partition_name).to end_with("_20200501")
end
context 'without from date' do
let(:from) { nil }
it 'uses 00000000 as suffix for first partition' do
expect(partition_name).to end_with("_00000000")
end
end
end
private
def time_partition(table_name, lower_bound, upper_bound, partition_name)
Gitlab::Database::Partitioning::TimePartition.new(
table_name,
lower_bound,
upper_bound,
partition_name: partition_name
)
end
end

View File

@ -173,7 +173,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory, :use_clean_rails_
expect(created_object.target_project).to equal(project)
end
it 'has MWPS set to false' do
it 'has auto merge set to false' do
expect(created_object.merge_when_pipeline_succeeds).to eq(false)
end

View File

@ -313,7 +313,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer, feature_category: :i
end
end
it 'sets MWPS to false for all merge requests' do
it 'sets auto merge to false for all merge requests' do
MergeRequest.find_each do |merge_request|
expect(merge_request.merge_when_pipeline_succeeds).to eq(false)
end

View File

@ -1822,7 +1822,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category:
describe 'auto merge' do
context 'when auto merge is enabled' do
let_it_be_with_reload(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
let_it_be_with_reload(:merge_request) { create(:merge_request, :merge_when_checks_pass) }
let_it_be_with_reload(:pipeline) do
create(:ci_pipeline, :running,
project: merge_request.source_project, ref: merge_request.source_branch, sha: merge_request.diff_head_sha)

View File

@ -2312,18 +2312,10 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
describe "#auto_merge_strategy" do
subject { merge_request.auto_merge_strategy }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
let(:merge_request) { create(:merge_request, :merge_when_checks_pass) }
it { is_expected.to eq('merge_when_checks_pass') }
context 'when merge_when_checks_pass is false' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it { is_expected.to eq('merge_when_pipeline_succeeds') }
end
context 'when auto merge is disabled' do
let(:merge_request) { create(:merge_request) }
@ -2334,17 +2326,9 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
describe '#default_auto_merge_strategy' do
subject { merge_request.default_auto_merge_strategy }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
let(:merge_request) { create(:merge_request, :merge_when_checks_pass) }
it { is_expected.to eq(AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS) }
context 'when merge_when_checks_pass feature flag is off' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it { is_expected.to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS) }
end
end
describe '#committers' do
@ -3938,7 +3922,6 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
where(:auto_merge_strategy, :skip_checks) do
'' | false
AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS | false
AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS | true
end
@ -5845,7 +5828,7 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
let!(:merge_request1) do
create(
:merge_request,
:merge_when_pipeline_succeeds,
:merge_when_checks_pass,
target_project: project,
target_branch: 'master',
source_project: project,

View File

@ -1583,10 +1583,10 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
end
context 'merge_user' do
context 'when MR is set to MWPS' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds, source_project: project, target_project: project) }
context 'when MR is set to auto merge' do
let(:merge_request) { create(:merge_request, :merge_when_checks_pass, source_project: project, target_project: project) }
it 'returns user who set MWPS' do
it 'returns user who set to auto merge' do
get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
expect(response).to have_gitlab_http_status(:ok)
@ -3179,45 +3179,6 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
expect(response).to have_gitlab_http_status(:ok)
end
context 'when merge_when_checks_pass is off' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it_behaves_like 'merging with auto merge strategies'
it 'does not enable auto merge if MR is not mergeable and only_allow_merge_if_pipeline_succeeds is true' do
allow_any_instance_of(MergeRequest)
.to receive_messages(
head_pipeline: pipeline,
diff_head_pipeline: pipeline
)
merge_request.update!(title: 'Draft: 1234')
project.update_attribute(:only_allow_merge_if_pipeline_succeeds, true)
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { merge_when_pipeline_succeeds: true }
expect(response).to have_gitlab_http_status(:method_not_allowed)
expect(merge_request.reload.state).to eq('opened')
end
context 'when the pipeline failed' do
let(:pipeline) { create(:ci_pipeline, :failed, project: project) }
it 'does not enable auto merge if the pipeline failed and only_allow_merge_if_pipeline_succeeds is true' do
allow_any_instance_of(MergeRequest).to receive_messages(head_pipeline: pipeline, diff_head_pipeline: pipeline)
project.update_attribute(:only_allow_merge_if_pipeline_succeeds, true)
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { merge_when_pipeline_succeeds: true }
expect(response).to have_gitlab_http_status(:method_not_allowed)
expect(merge_request.reload.state).to eq('opened')
end
end
end
it_behaves_like 'merging with auto merge strategies'
it 'enables auto merge if the MR is not mergeable and only_allow_merge_if_pipeline_succeeds is true' do
@ -3990,7 +3951,7 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc
describe 'POST :id/merge_requests/:merge_request_iid/cancel_merge_when_pipeline_succeeds' do
before do
::AutoMergeService.new(merge_request.target_project, user).execute(merge_request, AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
::AutoMergeService.new(merge_request.target_project, user).execute(merge_request, AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS)
end
it 'removes the merge_when_pipeline_succeeds status' do

View File

@ -49,21 +49,11 @@ RSpec.describe MergeRequestPollWidgetEntity do
end
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
let(:resource) { create(:merge_request, :merge_when_checks_pass) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_strategy]).to eq('merge_when_checks_pass')
end
context 'when merge_when_checks_pass is false' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'returns auto merge related information' do
expect(subject[:auto_merge_strategy]).to eq('merge_when_pipeline_succeeds')
end
end
end
context 'when auto merge is not enabled' do
@ -83,16 +73,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to eq(%w[merge_when_checks_pass])
end
context 'when the merge_when_checks_pass is false' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to eq(%w[merge_when_pipeline_succeeds])
end
end
end
describe 'squash defaults for projects' do

View File

@ -56,32 +56,6 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
end
end
context 'when strategy is merge when pipeline succeeds' do
let(:service) { AutoMerge::MergeWhenPipelineSucceedsService.new(project, user) }
before do
pipeline = build(:ci_pipeline)
allow(merge_request).to receive(:diff_head_pipeline) { pipeline }
end
it 'sets the auto merge strategy' do
subject
merge_request.reload
expect(merge_request.auto_merge_strategy).to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
end
it 'returns activated strategy name' do
is_expected.to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS.to_sym)
end
it 'calls AutoMergeProcessWorker' do
expect(AutoMergeProcessWorker).to receive(:perform_async).with({ 'merge_request_id' => merge_request.id }).once
subject
end
end
context 'when failed to save merge request' do
before do
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
@ -133,7 +107,7 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
describe '#update' do
subject { service.update(merge_request) } # rubocop:disable Rails/SaveBang
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
let(:merge_request) { create(:merge_request, :merge_when_checks_pass) }
context 'when merge params are specified' do
let(:params) do
@ -178,7 +152,7 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
'should_remove_source_branch' => false,
'commit_message' => "Merge branch 'patch-12' into 'master'",
'squash_commit_message' => "Update README.md",
'auto_merge_strategy' => 'merge_when_pipeline_succeeds'
'auto_merge_strategy' => 'merge_when_checks_pass'
})
end
@ -207,7 +181,7 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
describe '#cancel' do
subject { service.cancel(merge_request) }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
let(:merge_request) { create(:merge_request, :merge_when_checks_pass) }
it_behaves_like 'Canceled or Dropped'
@ -253,7 +227,7 @@ RSpec.describe AutoMerge::BaseService, feature_category: :code_review_workflow d
describe '#abort' do
subject { service.abort(merge_request, reason) }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
let(:merge_request) { create(:merge_request, :merge_when_checks_pass) }
let(:reason) { 'an error' }
it_behaves_like 'Canceled or Dropped'

View File

@ -15,12 +15,6 @@ RSpec.describe AutoMerge::MergeWhenChecksPassService, feature_category: :code_re
describe '#available_for?' do
subject { service.available_for?(mr_merge_if_green_enabled) }
let(:feature_flag) { true }
before do
stub_feature_flags(merge_when_checks_pass: feature_flag)
end
context 'when immediately mergeable' do
context 'when a non active pipeline' do
before do
@ -47,24 +41,12 @@ RSpec.describe AutoMerge::MergeWhenChecksPassService, feature_category: :code_re
end
end
context 'when merge when checks pass flag is off' do
let(:feature_flag) { false }
it { is_expected.to eq false }
end
context 'when draft status' do
before do
mr_merge_if_green_enabled.update!(title: 'Draft: check')
end
it { is_expected.to eq true }
context 'when merge_when_checks_pass flag is off' do
let(:feature_flag) { false }
it { is_expected.to eq false }
end
end
context 'when discussions open' do
@ -75,12 +57,6 @@ RSpec.describe AutoMerge::MergeWhenChecksPassService, feature_category: :code_re
end
it { is_expected.to eq true }
context 'when merge_when_checks_pass flag is off' do
let(:feature_flag) { false }
it { is_expected.to eq false }
end
end
context 'when pipline is active' do
@ -94,12 +70,6 @@ RSpec.describe AutoMerge::MergeWhenChecksPassService, feature_category: :code_re
end
it { is_expected.to eq true }
context 'when merge_when_checks_pass flag is off' do
let(:feature_flag) { false }
it { is_expected.to eq false }
end
end
context 'when the user does not have permission to merge' do

View File

@ -5,12 +5,6 @@ require 'spec_helper'
RSpec.describe AutoMerge::MergeWhenPipelineSucceedsService, feature_category: :code_review_workflow do
include_context 'for auto_merge strategy context'
let(:merge_when_checks_pass_ff) { false }
before do
stub_feature_flags(merge_when_checks_pass: merge_when_checks_pass_ff)
end
describe "#available_for?" do
subject { service.available_for?(mr_merge_if_green_enabled) }
@ -27,19 +21,7 @@ RSpec.describe AutoMerge::MergeWhenPipelineSucceedsService, feature_category: :c
mr_merge_if_green_enabled.update_head_pipeline
end
it { is_expected.to be_truthy }
context 'when merge when checks ff is true' do
let(:merge_when_checks_pass_ff) { true }
it { is_expected.to be_falsey }
end
it 'memoizes the result' do
expect(mr_merge_if_green_enabled).to receive(:can_be_merged_by?).once.and_call_original
2.times { is_expected.to be_truthy }
end
it { is_expected.to be_falsey }
context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }

View File

@ -52,17 +52,7 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
is_expected.to include('merge_when_checks_pass')
end
context 'when merge_when_checks_pass is off' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'returns available strategies' do
is_expected.to include('merge_when_pipeline_succeeds')
end
end
context 'when the head piipeline succeeded' do
context 'when the head pipeline succeeded' do
let(:pipeline_status) { :success }
it 'returns available strategies' do
@ -157,19 +147,15 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
end
end
context 'when the strategy is MWPS and merge_when_checks_pass is off' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
context 'when the strategy is MWPS' do
let(:strategy) { AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS }
it 'delegates to a relevant service instance' do
it 'does not call execute and returns failed' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:execute).with(merge_request)
expect(service).not_to receive(:execute).with(merge_request)
end
subject
expect(subject).to eq(:failed)
end
end
@ -213,10 +199,6 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
context 'when the merge request is MWPS' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:update).with(merge_request)
@ -254,10 +236,6 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
context 'when the merge request is MWPS' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:process).with(merge_request)
@ -294,10 +272,6 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
context 'when the merge request is MWPS' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:cancel).with(merge_request)
@ -338,10 +312,6 @@ RSpec.describe AutoMergeService, feature_category: :code_review_workflow do
context 'when the merge request is MWPS' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'delegates to a relevant service instance' do
expect_next_instance_of(AutoMerge::MergeWhenPipelineSucceedsService) do |service|
expect(service).to receive(:abort).with(merge_request, error)

View File

@ -6,7 +6,7 @@ RSpec.describe Discussions::ResolveService, feature_category: :code_review_workf
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_of: project) }
let_it_be(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds, source_project: project) }
let_it_be(:merge_request) { create(:merge_request, :merge_when_checks_pass, source_project: project) }
let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
let(:service) { described_class.new(project, user, one_or_more_discussions: discussion) }
@ -46,18 +46,6 @@ RSpec.describe Discussions::ResolveService, feature_category: :code_review_workf
.to publish_event(MergeRequests::DiscussionsResolvedEvent)
.with(current_user_id: user.id, merge_request_id: merge_request.id)
end
context 'when merge_when_checks_pass is false' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'schedules an auto-merge' do
expect(AutoMergeProcessWorker).to receive(:perform_async)
service.execute
end
end
end
context 'when not all discussions are resolved' do
@ -66,18 +54,6 @@ RSpec.describe Discussions::ResolveService, feature_category: :code_review_workf
it 'does not publish the discussions resolved event' do
expect { service.execute }.not_to publish_event(MergeRequests::DiscussionsResolvedEvent)
end
context 'when merge_when_checks_pass is false' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'schedules an auto-merge' do
expect(AutoMergeProcessWorker).to receive(:perform_async)
described_class.new(project, user, one_or_more_discussions: [discussion, other_discussion]).execute
end
end
end
it 'sends GraphQL triggers' do

View File

@ -6,7 +6,7 @@ RSpec.describe Discussions::UnresolveService, feature_category: :code_review_wor
describe "#execute" do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_of: project) }
let_it_be(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds, source_project: project) }
let_it_be(:merge_request) { create(:merge_request, :merge_when_checks_pass, source_project: project) }
let(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }

View File

@ -58,7 +58,7 @@ RSpec.describe MergeRequests::CloseService, feature_category: :code_review_workf
end
context 'when auto merge is enabled' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
let(:merge_request) { create(:merge_request, :merge_when_checks_pass) }
it 'cancels the auto merge' do
expect(@merge_request).not_to be_auto_merge_enabled

View File

@ -119,16 +119,6 @@ RSpec.describe MergeRequests::MergeOrchestrationService, feature_category: :code
it 'fetches preferred auto merge strategy' do
is_expected.to eq(AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS)
end
context 'when merge_when_checks_pass feature is off' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'fetches preferred auto merge strategy' do
is_expected.to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
end
end
end
context 'when merge request cannot be merged automatically' do

View File

@ -198,11 +198,11 @@ RSpec.describe MergeRequests::PostMergeService, feature_category: :code_review_w
let(:params) { { delete_source_branch: true } }
it 'aborts auto merges' do
mr_1 = create(:merge_request, :merge_when_pipeline_succeeds, target_branch: merge_request.source_branch,
mr_1 = create(:merge_request, :merge_when_checks_pass, target_branch: merge_request.source_branch,
source_branch: "test", source_project: merge_request.project)
mr_2 = create(:merge_request, :merge_when_checks_pass, target_branch: merge_request.source_branch,
source_branch: "feature", source_project: merge_request.project)
mr_3 = create(:merge_request, :merge_when_pipeline_succeeds, target_branch: 'feature',
mr_3 = create(:merge_request, :merge_when_checks_pass, target_branch: 'feature',
source_branch: 'second', source_project: merge_request.project)
expect(merge_request.source_project.merge_requests.with_auto_merge_enabled).to contain_exactly(mr_1, mr_2, mr_3)
@ -213,30 +213,11 @@ RSpec.describe MergeRequests::PostMergeService, feature_category: :code_review_w
context 'when source branch is not be deleted' do
it 'does not abort any auto merges' do
mr_1 = create(:merge_request, :merge_when_pipeline_succeeds, target_branch: merge_request.source_branch,
mr_1 = create(:merge_request, :merge_when_checks_pass, target_branch: merge_request.source_branch,
source_branch: "test", source_project: merge_request.project)
mr_2 = create(:merge_request, :merge_when_checks_pass, target_branch: merge_request.source_branch,
source_branch: "feature", source_project: merge_request.project)
mr_3 = create(:merge_request, :merge_when_pipeline_succeeds, target_branch: 'feature',
source_branch: 'second', source_project: merge_request.project)
expect(merge_request.source_project.merge_requests.with_auto_merge_enabled).to contain_exactly(mr_1, mr_2, mr_3)
subject
expect(merge_request.source_project.merge_requests.with_auto_merge_enabled).to contain_exactly(mr_1, mr_2, mr_3)
end
end
context 'when merge_when_checks_pass is disabled' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'does not aborts any auto merges' do
mr_1 = create(:merge_request, :merge_when_pipeline_succeeds, target_branch: merge_request.source_branch,
source_branch: "test", source_project: merge_request.project)
mr_2 = create(:merge_request, :merge_when_checks_pass, target_branch: merge_request.source_branch,
source_branch: "feature", source_project: merge_request.project)
mr_3 = create(:merge_request, :merge_when_pipeline_succeeds, target_branch: 'feature',
mr_3 = create(:merge_request, :merge_when_checks_pass, target_branch: 'feature',
source_branch: 'second', source_project: merge_request.project)
expect(merge_request.source_project.merge_requests.with_auto_merge_enabled).to contain_exactly(mr_1, mr_2, mr_3)

View File

@ -139,21 +139,6 @@ RSpec.describe MergeRequests::PushOptionsHandlerService, feature_category: :sour
expect(last_mr.merge_user).to eq(user1)
expect(last_mr.merge_params['sha']).to eq(change[:newrev])
end
context 'when merge_when_checks_pass is false' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'sets auto_merge_enabled' do
service.execute
expect(last_mr.auto_merge_enabled).to eq(true)
expect(last_mr.auto_merge_strategy).to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
expect(last_mr.merge_user).to eq(user1)
expect(last_mr.merge_params['sha']).to eq(change[:newrev])
end
end
end
shared_examples_for 'a service that can remove the source branch when it is merged' do

View File

@ -26,7 +26,7 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
target_branch: 'feature',
target_project: @project,
auto_merge_enabled: true,
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS,
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS,
merge_user: @user
)
@ -37,7 +37,7 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
target_branch: 'test',
target_project: @project,
auto_merge_enabled: true,
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS,
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS,
merge_user: @user
)
@ -543,6 +543,15 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
context 'With merged MR that contains the same SHA' do
before do
@merge_request.head_pipeline = create(
:ci_pipeline,
:success,
project: @merge_request.source_project,
ref: @merge_request.source_branch,
sha: @merge_request.diff_head_sha)
@merge_request.update_head_pipeline
# Merged via UI
MergeRequests::MergeService
.new(project: @merge_request.target_project, current_user: @user, params: { sha: @merge_request.diff_head_sha })
@ -1012,7 +1021,7 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
target_project: project,
merge_user: user,
auto_merge_enabled: true,
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS
)
end
@ -1030,7 +1039,7 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
merge_request.reload
end
it 'aborts MWPS for merge requests' do
it 'aborts auto merge for merge requests' do
expect(merge_request.auto_merge_enabled?).to be_falsey
expect(merge_request.merge_user).to be_nil
end
@ -1038,7 +1047,7 @@ RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_wor
context 'when merge params contains up-to-date sha' do
let(:merge_sha) { newrev }
it 'maintains MWPS for merge requests' do
it 'maintains auto merge for merge requests' do
expect(merge_request.auto_merge_enabled?).to be_truthy
expect(merge_request.merge_user).to eq(user)
end

View File

@ -857,7 +857,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
context 'when auto merge is enabled and target branch changed' do
before do
AutoMergeService.new(project, user, { sha: merge_request.diff_head_sha }).execute(merge_request, AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
AutoMergeService.new(project, user, { sha: merge_request.diff_head_sha }).execute(merge_request, AutoMergeService::STRATEGY_MERGE_WHEN_CHECKS_PASS)
end
it 'calls MergeRequests::ResolveTodosService#async_execute' do
@ -920,16 +920,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
end
end
context 'when merge_when_checks_pass is disabled' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'does not publish a DraftStateChangeEvent' do
expect { update_merge_request(title: 'New title') }.not_to publish_event(MergeRequests::DraftStateChangeEvent)
end
end
context 'when removing through wip_event param' do
it 'removes Draft from the title' do
expect { update_merge_request({ wip_event: "ready" }) }
@ -967,16 +957,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_re
end
end
context 'when merge_when_checks_pass is disabled' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it 'does not publish a DraftStateChangeEvent' do
expect { update_merge_request(title: 'Draft: New title') }.not_to publish_event(MergeRequests::DraftStateChangeEvent)
end
end
it 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(merge_request)

View File

@ -33,17 +33,5 @@ RSpec.shared_examples 'process auto merge from event worker' do
.not_to raise_exception
end
end
context 'when feature flag "merge_when_checks_pass" is disabled' do
before do
stub_feature_flags(merge_when_checks_pass: false)
end
it "doesn't call AutoMergeService" do
expect(AutoMergeService).not_to receive(:new)
consume_event(subscriber: described_class, event: event)
end
end
end
end