Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-10-31 03:14:13 +00:00
parent 747345dee0
commit deba779683
78 changed files with 1662 additions and 323 deletions

View File

@ -4187,7 +4187,6 @@ RSpec/FeatureCategory:
- 'spec/views/shared/_label_row.html.haml_spec.rb'
- 'spec/views/shared/_milestones_sort_dropdown.html.haml_spec.rb'
- 'spec/views/shared/gitlab_version/_security_patch_upgrade_alert.html.haml_spec.rb'
- 'spec/views/shared/groups/_dropdown.html.haml_spec.rb'
- 'spec/views/shared/issuable/_sidebar.html.haml_spec.rb'
- 'spec/views/shared/milestones/_issuable.html.haml_spec.rb'
- 'spec/views/shared/milestones/_issuables.html.haml_spec.rb'

View File

@ -0,0 +1,102 @@
<script>
import FilteredSearchAndSort from '~/groups_projects/components/filtered_search_and_sort.vue';
import {
  FILTERED_SEARCH_NAMESPACE,
  FILTERED_SEARCH_TERM_KEY,
  SORT_DIRECTION_ASC,
  SORT_DIRECTION_DESC,
  SORT_OPTION_CREATED_DATE,
  SORT_OPTIONS,
} from '~/admin/groups/constants';
import { RECENT_SEARCHES_STORAGE_KEY_GROUPS } from '~/filtered_search/recent_searches_storage_keys';
import { objectToQuery, queryToObject, visitUrl } from '~/lib/utils/url_utility';

// Admin > Groups filtered search and sort bar. All search/sort state lives in
// the page URL; every change triggers a full-page visit with an updated query
// string rather than mutating local component state.
export default {
  components: {
    FilteredSearchAndSort,
  },
  computed: {
    // Sort option applied when the URL carries no explicit `sort` param.
    defaultSortOption() {
      return SORT_OPTION_CREATED_DATE;
    },
    // Default sort key, e.g. `created_desc`.
    defaultSortBy() {
      return `${this.defaultSortOption.value}_${SORT_DIRECTION_DESC}`;
    },
    // Current URL query string parsed into a plain object.
    queryAsObject() {
      return queryToObject(document.location.search);
    },
    // Query object with `page` dropped so filter/sort changes reset pagination.
    queryAsObjectWithoutPagination() {
      const { page, ...queryAsObject } = this.queryAsObject;
      return queryAsObject;
    },
    // Raw `sort` param from the URL (undefined when not present).
    sortByQuery() {
      return this.queryAsObject.sort;
    },
    // Effective sort key: explicit URL value or the default.
    sortBy() {
      return this.sortByQuery || this.defaultSortBy;
    },
    sortOptions() {
      return SORT_OPTIONS;
    },
    // Sort option whose value appears in the current sort key; falls back to
    // the default option when the key is unrecognized.
    activeSortOption() {
      return (
        this.sortOptions.find((option) => this.sortBy.includes(option.value)) ||
        this.defaultSortOption
      );
    },
    // True when the sort key ends with the ascending suffix (`_asc`).
    isAscending() {
      return this.sortBy.endsWith(SORT_DIRECTION_ASC);
    },
  },
  methods: {
    // Navigates (full page load) to the current path with the given query object.
    visitUrlWithQueryObject(queryObject) {
      return visitUrl(`?${objectToQuery(queryObject)}`);
    },
    // Rebuilds the sort key and navigates, dropping pagination.
    onSortChange(sortBy, isAscending) {
      const sort = `${sortBy}_${isAscending ? SORT_DIRECTION_ASC : SORT_DIRECTION_DESC}`;

      this.visitUrlWithQueryObject({ ...this.queryAsObjectWithoutPagination, sort });
    },
    onSortDirectionChange(isAscending) {
      this.onSortChange(this.activeSortOption.value, isAscending);
    },
    onSortByChange(sortBy) {
      this.onSortChange(sortBy, this.isAscending);
    },
    // Applies a new filter query, preserving an explicit sort (if any).
    onFilter(filtersQuery) {
      const queryObject = { ...filtersQuery };

      if (this.sortByQuery) {
        queryObject.sort = this.sortByQuery;
      }

      this.visitUrlWithQueryObject(queryObject);
    },
  },
  // Static configuration forwarded to the shared FilteredSearchAndSort
  // component via $options in the template.
  filteredSearch: {
    recentSearchesStorageKey: RECENT_SEARCHES_STORAGE_KEY_GROUPS,
    namespace: FILTERED_SEARCH_NAMESPACE,
    termKey: FILTERED_SEARCH_TERM_KEY,
    tokens: [],
  },
};
</script>

<template>
  <div class="gl-mb-4" data-testid="admin-groups-filtered-search-and-sort">
    <filtered-search-and-sort
      :filtered-search-namespace="$options.filteredSearch.namespace"
      :filtered-search-tokens="$options.filteredSearch.tokens"
      :filtered-search-term-key="$options.filteredSearch.termKey"
      :filtered-search-recent-searches-storage-key="
        $options.filteredSearch.recentSearchesStorageKey
      "
      :is-ascending="isAscending"
      :sort-options="sortOptions"
      :active-sort-option="activeSortOption"
      :filtered-search-query="queryAsObject"
      @filter="onFilter"
      @sort-direction-change="onSortDirectionChange"
      @sort-by-change="onSortByChange"
    />
  </div>
</template>

View File

@ -0,0 +1,39 @@
import { __ } from '~/locale';

// Filtered-search wiring for the Admin > Groups page.
export const FILTERED_SEARCH_NAMESPACE = 'admin-groups';
export const FILTERED_SEARCH_TERM_KEY = 'name';

// Suffixes appended to a sort field to form the full sort key (e.g. `created_desc`).
export const SORT_DIRECTION_ASC = 'asc';
export const SORT_DIRECTION_DESC = 'desc';

// Backend sort field identifiers.
const NAME = 'name';
const CREATED = 'created';
const LATEST_ACTIVITY = 'latest_activity';
const STORAGE_SIZE = 'storage_size';

// Builds a listbox-compatible sort option entry.
const sortOption = (text, value) => ({ text, value });

export const SORT_OPTION_NAME = sortOption(__('Name'), NAME);
export const SORT_OPTION_CREATED_DATE = sortOption(__('Created date'), CREATED);
export const SORT_OPTION_UPDATED_DATE = sortOption(__('Updated date'), LATEST_ACTIVITY);
export const SORT_OPTION_STORAGE_SIZE = sortOption(__('Storage size'), STORAGE_SIZE);

// All options offered by the sort dropdown, in display order.
export const SORT_OPTIONS = [
  SORT_OPTION_NAME,
  SORT_OPTION_CREATED_DATE,
  SORT_OPTION_UPDATED_DATE,
  SORT_OPTION_STORAGE_SIZE,
];

View File

@ -0,0 +1,16 @@
import Vue from 'vue';
import FilteredSearchAndSort from './components/filtered_search_and_sort.vue';

// Mounts the admin groups filtered-search-and-sort app on its placeholder
// element. Returns false when the placeholder is absent from the page.
export const initAdminGroupsFilteredSearchAndSort = () => {
  const mountEl = document.getElementById('js-admin-groups-filtered-search-and-sort');

  if (!mountEl) return false;

  return new Vue({
    el: mountEl,
    name: 'AdminGroupsFilteredSearchAndSort',
    render: (createElement) => createElement(FilteredSearchAndSort),
  });
};

View File

@ -199,7 +199,7 @@ export default {
<code>&lt;name&gt;_&lt;type(optional)&gt;</code>
</template>
<template #example>
<code>ubuntu</code>
<code>ubuntu_containerd</code>
</template>
<!-- eslint-enable @gitlab/vue-require-i18n-strings -->
</gl-sprintf>

View File

@ -1,5 +1,6 @@
<script>
import { GlButton, GlIcon, GlAlert, GlTabs, GlTab, GlLink } from '@gitlab/ui';
import TabTitle from './tab_title.vue';
import MergeRequestsQuery from './merge_requests_query.vue';
import CollapsibleSection from './collapsible_section.vue';
import MergeRequest from './merge_request.vue';
@ -12,6 +13,7 @@ export default {
GlTabs,
GlTab,
GlLink,
TabTitle,
MergeRequestsQuery,
CollapsibleSection,
MergeRequest,
@ -33,6 +35,11 @@ export default {
this.currentTab = key;
this.$router.push({ path: key || '/' });
},
queriesForTab(tab) {
return tab.lists
.filter((l) => !l.hideCount)
.map((list) => ({ query: list.query, variables: list.variables }));
},
},
};
</script>
@ -43,22 +50,24 @@ export default {
<gl-tab
v-for="tab in tabs"
:key="tab.title"
:title="tab.title"
:active="tab.key === currentTab"
lazy
@click="clickTab(tab)"
>
<template #title>
<tab-title :title="tab.title" :queries="queriesForTab(tab)" :tab-key="tab.key" />
</template>
<merge-requests-query
v-for="(list, i) in tab.lists"
:key="`list_${i}`"
:query="list.query"
:variables="list.variables"
:hide-count="list.hideCount"
:class="{ 'gl-mb-4': i !== tab.lists.length - 1 }"
>
<template #default="{ mergeRequests, count, hasNextPage, loadMore, loading, error }">
<collapsible-section
:count="count"
:loading="loading || error"
:title="list.title"
:help-content="list.helpContent"
>

View File

@ -25,11 +25,6 @@ export default {
required: false,
default: null,
},
loading: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {
@ -86,9 +81,14 @@ export default {
@click="toggleOpen"
/>
{{ title }}
<gl-badge v-if="!loading || count" class="gl-ml-1" variant="neutral" size="sm">{{
count
}}</gl-badge>
<gl-badge
v-if="count !== null"
class="gl-ml-1"
variant="neutral"
size="sm"
data-testid="merge-request-list-count"
>{{ count }}</gl-badge
>
<gl-button
v-gl-tooltip
:title="helpContent"

View File

@ -1,22 +1,8 @@
<script>
import reviewerQuery from '../queries/reviewer.query.graphql';
import reviewerCountQuery from '../queries/reviewer_count.query.graphql';
import assigneeQuery from '../queries/assignee.query.graphql';
import assigneeCountQuery from '../queries/assignee_count.query.graphql';
import assigneeOrReviewerQuery from '../queries/assignee_or_reviewer.query.graphql';
import assigneeOrReviewerCountQuery from '../queries/assignee_or_reviewer_count.query.graphql';
import { QUERIES } from '../constants';
const PER_PAGE = 20;
const QUERIES = {
reviewRequestedMergeRequests: { dataQuery: reviewerQuery, countQuery: reviewerCountQuery },
assignedMergeRequests: { dataQuery: assigneeQuery, countQuery: assigneeCountQuery },
assigneeOrReviewerMergeRequests: {
dataQuery: assigneeOrReviewerQuery,
countQuery: assigneeOrReviewerCountQuery,
},
};
export default {
apollo: {
mergeRequests: {
@ -37,6 +23,9 @@ export default {
},
},
count: {
context: {
batchKey: 'MergeRequestListsCounts',
},
query() {
return QUERIES[this.query].countQuery;
},
@ -49,6 +38,9 @@ export default {
perPage: PER_PAGE,
};
},
skip() {
return this.hideCount;
},
},
},
props: {
@ -60,6 +52,11 @@ export default {
type: Object,
required: true,
},
hideCount: {
type: Boolean,
required: false,
default: false,
},
},
data() {
return {

View File

@ -0,0 +1,57 @@
<script>
import { GlBadge } from '@gitlab/ui';
import { QUERIES } from '../constants';

// Tab title with an aggregated merge request count badge. Fetches the count
// for every list shown in the tab and sums the results.
export default {
  components: { GlBadge },
  props: {
    // Tab label rendered before the count badge.
    title: {
      type: String,
      required: true,
    },
    // Array of `{ query, variables }` descriptors; `query` is a key into QUERIES.
    queries: {
      type: Array,
      required: true,
    },
    // Unique key of the tab; used to batch count requests per tab.
    tabKey: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      // While true the badge renders a '-' placeholder.
      loading: true,
      // Sum of the counts returned by every query for this tab.
      count: 0,
    };
  },
  mounted() {
    // Fire-and-forget: fetchAllCounts handles its own errors.
    this.fetchAllCounts();
  },
  methods: {
    async fetchAllCounts() {
      try {
        // Fetch all counts in parallel; requests are batched via batchKey.
        const results = await Promise.all(
          this.queries.map(({ query, variables }) => this.fetchCount({ query, variables })),
        );

        this.count = results.reduce(
          (acc, { data }) => acc + (data.currentUser?.mergeRequests?.count ?? 0),
          0,
        );
      } catch {
        // Without this, a single failed query rejects Promise.all, surfaces an
        // unhandled rejection, and leaves the badge stuck on the placeholder.
        // Fall back to showing 0 instead.
        this.count = 0;
      } finally {
        this.loading = false;
      }
    },
    fetchCount({ query, variables }) {
      return this.$apollo.query({
        query: QUERIES[query].countQuery,
        variables,
        // Batch all count queries belonging to this tab into one request.
        context: { batchKey: `MergeRequestTabsCounts_${this.tabKey}` },
      });
    },
  },
};
</script>

<template>
  <span>
    {{ title }}
    <gl-badge class="gl-tab-counter-badge" data-testid="tab-count">{{
      loading ? '-' : count
    }}</gl-badge>
  </span>
</template>

View File

@ -0,0 +1,15 @@
import reviewerQuery from './queries/reviewer.query.graphql';
import reviewerCountQuery from './queries/reviewer_count.query.graphql';
import assigneeQuery from './queries/assignee.query.graphql';
import assigneeCountQuery from './queries/assignee_count.query.graphql';
import assigneeOrReviewerQuery from './queries/assignee_or_reviewer.query.graphql';
import assigneeOrReviewerCountQuery from './queries/assignee_or_reviewer_count.query.graphql';

// Maps a list's `query` key (as referenced by tab/list configuration) to the
// GraphQL documents used for that list:
// - `dataQuery`: fetches the merge requests themselves
// - `countQuery`: fetches only the total count (used for badges)
export const QUERIES = {
  reviewRequestedMergeRequests: { dataQuery: reviewerQuery, countQuery: reviewerCountQuery },
  assignedMergeRequests: { dataQuery: assigneeQuery, countQuery: assigneeCountQuery },
  assigneeOrReviewerMergeRequests: {
    dataQuery: assigneeOrReviewerQuery,
    countQuery: assigneeOrReviewerCountQuery,
  },
};

View File

@ -1,3 +1,5 @@
import initConfirmDanger from '~/init_confirm_danger';
import { initAdminGroupsFilteredSearchAndSort } from '~/admin/groups/index';
initConfirmDanger();
initAdminGroupsFilteredSearchAndSort();

View File

@ -198,8 +198,8 @@ export default {
</gl-tooltip>
</div>
<div class="gl-flex gl-min-w-0 gl-grow gl-flex-col gl-flex-wrap">
<div class="gl-mb-2 gl-flex gl-min-w-0 gl-justify-between gl-gap-3">
<div class="item-title gl-min-w-0">
<div class="gl-mb-2 gl-min-w-0 gl-justify-between gl-gap-3 sm:gl-flex">
<div class="item-title gl-mb-2 gl-min-w-0 sm:gl-mb-0">
<span v-if="childItem.confidential">
<gl-icon
v-gl-tooltip.top
@ -221,7 +221,9 @@ export default {
{{ childItem.title }}
</gl-link>
</div>
<div class="gl-flex gl-shrink-0 gl-items-center gl-justify-end gl-gap-3">
<div
class="gl-flex gl-shrink-0 gl-flex-row-reverse gl-items-center gl-justify-end gl-gap-3"
>
<gl-avatars-inline
v-if="assignees.length"
:avatars="assignees"

View File

@ -47,8 +47,10 @@ export default {
</script>
<template>
<div class="gl-flex gl-justify-between">
<div class="gl-flex gl-flex-wrap gl-items-center gl-gap-3 gl-text-sm gl-text-secondary">
<div class="gl-justify-between sm:gl-flex">
<div
class="gl-mb-2 gl-flex gl-flex-wrap gl-items-center gl-gap-x-3 gl-gap-y-2 gl-text-sm gl-text-secondary sm:gl-mb-0"
>
<span>{{ reference }}</span>
<work-item-rolled-up-count
v-if="showRolledUpCounts"

View File

@ -70,7 +70,7 @@ export default {
v-if="showDetailedCount"
ref="info"
tabindex="0"
class="gl-flex gl-gap-3 gl-text-sm"
class="gl-flex gl-gap-3 gl-text-nowrap gl-text-sm"
data-testid="work-item-rolled-up-detailed-count"
>
<span

View File

@ -9,14 +9,12 @@ import {
WORK_ITEM_TYPE_VALUE_EPIC,
WIDGET_TYPE_HEALTH_STATUS,
} from '../../constants';
import WorkItemRolledUpCount from './work_item_rolled_up_count.vue';
export default {
components: {
GlIcon,
GlTooltip,
GlPopover,
WorkItemRolledUpCount,
WorkItemRolledUpHealthStatus: () =>
import(
'ee_component/work_items/components/work_item_links/work_item_rolled_up_health_status.vue'
@ -44,10 +42,6 @@ export default {
required: false,
default: null,
},
rolledUpCountsByType: {
type: Array,
required: true,
},
},
data() {
return {
@ -113,17 +107,13 @@ export default {
<template>
<div class="gl-flex">
<!-- Rolled up count -->
<work-item-rolled-up-count :rolled-up-counts-by-type="rolledUpCountsByType" />
<!-- END Rolled up count -->
<!-- Rolled up weight -->
<span
v-if="shouldRolledUpWeightBeVisible"
ref="weightData"
tabindex="0"
data-testid="work-item-rollup-weight"
class="gl-ml-3 gl-flex gl-cursor-help gl-items-center gl-gap-2 gl-font-normal gl-text-secondary"
class="gl-flex gl-cursor-help gl-items-center gl-gap-2 gl-font-normal gl-text-secondary sm:gl-ml-3"
>
<gl-icon name="weight" class="gl-text-secondary" />
<span data-testid="work-item-weight-value" class="gl-text-sm">{{ rolledUpWeight }}</span>

View File

@ -32,6 +32,7 @@ import WorkItemActionsSplitButton from './work_item_actions_split_button.vue';
import WorkItemLinksForm from './work_item_links_form.vue';
import WorkItemChildrenWrapper from './work_item_children_wrapper.vue';
import WorkItemRolledUpData from './work_item_rolled_up_data.vue';
import WorkItemRolledUpCount from './work_item_rolled_up_count.vue';
export default {
FORM_TYPES,
@ -47,6 +48,7 @@ export default {
WorkItemChildrenLoadMore,
WorkItemMoreActions,
WorkItemRolledUpData,
WorkItemRolledUpCount,
},
inject: ['hasSubepicsFeature'],
provide() {
@ -331,12 +333,28 @@ export default {
data-testid="work-item-tree"
>
<template #count>
<work-item-rolled-up-count
v-if="!isLoadingChildren"
class="gl-ml-2 sm:gl-ml-0"
:rolled-up-counts-by-type="rolledUpCountsByType"
/>
<work-item-rolled-up-data
v-if="!isLoadingChildren"
class="gl-hidden sm:gl-flex"
:work-item-id="workItemId"
:work-item-iid="workItemIid"
:work-item-type="workItemType"
:full-path="fullPath"
/>
</template>
<template #description>
<work-item-rolled-up-data
v-if="!isLoadingChildren"
class="gl-mt-2 sm:gl-hidden"
:work-item-id="workItemId"
:work-item-iid="workItemIid"
:work-item-type="workItemType"
:rolled-up-counts-by-type="rolledUpCountsByType"
:full-path="fullPath"
/>
</template>

View File

@ -176,7 +176,7 @@
}
.job-log-line-number {
color: $gray-500;
@apply gl-text-gray-500;
padding-right: $gl-padding-8;
margin-right: $gl-padding-8;
min-width: $job-line-number-width;

View File

@ -18,7 +18,6 @@ module Projects
urgency :low, [:show, :create_deploy_token]
def show
Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/482942')
render_show
end

View File

@ -21,10 +21,6 @@ module ExploreHelper
request_path_with_options(options)
end
def filter_groups_path(options = {})
request_path_with_options(options)
end
def public_visibility_restricted?
Gitlab::VisibilityLevel.public_visibility_restricted?
end

View File

@ -503,6 +503,7 @@ module MergeRequestsHelper
helpContent: _('These merge requests merged after %{date}. You were an assignee or a reviewer.') % {
date: 2.weeks.ago.to_date.to_formatted_s(:long)
},
hideCount: true,
query: 'assigneeOrReviewerMergeRequests',
variables: {
state: 'merged',

View File

@ -73,23 +73,6 @@ module SortingHelper
}
end
def groups_sort_options_hash
{
sort_value_name => sort_title_name,
sort_value_name_desc => sort_title_name_desc,
sort_value_recently_created => sort_title_recently_created,
sort_value_oldest_created => sort_title_oldest_created,
sort_value_latest_activity => sort_title_recently_updated,
sort_value_oldest_activity => sort_title_oldest_updated
}
end
def admin_groups_sort_options_hash
groups_sort_options_hash.merge(
sort_value_largest_group => sort_title_largest_group
)
end
def milestones_sort_options_hash
{
sort_value_due_date_soon => sort_title_due_date_soon,

View File

@ -6,14 +6,9 @@
= link_button_to new_admin_group_path, variant: :confirm do
= _('New group')
.md:gl-flex.gl-min-w-0.gl-grow.row-content-block
= form_tag admin_groups_path, method: :get, class: 'js-search-form gl-w-full' do |f|
= hidden_field_tag :sort, @sort
.search-holder
.search-field-holder
= search_field_tag :name, params[:name].presence, class: "form-control search-text-input js-search-input", spellcheck: false, placeholder: 'Search by name', data: { testid: 'group-search-field' }
= sprite_icon('search', css_class: 'search-icon')
= render "shared/groups/dropdown", options_hash: admin_groups_sort_options_hash
#js-admin-groups-filtered-search-and-sort
-# This element takes up space while Vue is rendering to avoid page jump
.gl-h-12
- if @groups.any?
%ul.content-list

View File

@ -1,5 +0,0 @@
- options_hash = local_assigns.fetch(:options_hash, groups_sort_options_hash)
- groups_sort_options = options_hash.map { |value, title| { value: value, text: title, href: filter_groups_path(sort: value) } }
%div{ data: { testid: 'group_sort_by_dropdown' } }
= gl_redirect_listbox_tag groups_sort_options, project_list_sort_by, data: { placement: 'right' }

View File

@ -239,8 +239,15 @@ To access the Groups page:
For each group, the page displays their name, description, size, number of projects in the group,
number of members, and whether the group is private, internal, or public. To edit a group, in the group's row, select **Edit**. To delete the group, in the group's row, select **Delete**.
To change the sort order, select the sort dropdown list and select the desired order. The default
sort order is by **Last created**.
To change the sort order, select the sort dropdown list and choose the desired order.
You can sort groups by:
- Created date (default)
- Updated date
- Storage size
The storage size option sorts groups by the total storage used, including Git repositories
and Large File Storage (LFS) for all projects in the group. For more information, see [usage quotas](../user/storage_usage_quotas.md).
To search for groups by name, enter your criteria in the search field. The group search is
case-insensitive and applies partial matching.

View File

@ -458,6 +458,12 @@ This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#pagination-arguments):
`before: String`, `after: String`, `first: Int`, and `last: Int`.
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryduoworkflowworkflowsprojectpath"></a>`projectPath` | [`ID`](#id) | Full path of the project containing the workflows. |
### `Query.echo`
Testing endpoint to validate the API with.

View File

@ -269,7 +269,7 @@ positives.
| `CS_REGISTRY_PASSWORD` | `$CI_REGISTRY_PASSWORD` | Password for accessing a Docker registry requiring authentication. The default is only set if `$CS_IMAGE` resides at [`$CI_REGISTRY`](../../../ci/variables/predefined_variables.md). Not supported when [FIPS mode](../../../development/fips_compliance.md#enable-fips-mode) is enabled. |
| `CS_REGISTRY_USER` | `$CI_REGISTRY_USER` | Username for accessing a Docker registry requiring authentication. The default is only set if `$CS_IMAGE` resides at [`$CI_REGISTRY`](../../../ci/variables/predefined_variables.md). Not supported when [FIPS mode](../../../development/fips_compliance.md#enable-fips-mode) is enabled. |
| `CS_SEVERITY_THRESHOLD` | `UNKNOWN` | Severity level threshold. The scanner outputs vulnerabilities with severity level higher than or equal to this threshold. Supported levels are `UNKNOWN`, `LOW`, `MEDIUM`, `HIGH`, and `CRITICAL`. |
| `CS_TRIVY_JAVA_DB` | `"ghcr.io/aquasecurity/trivy-java-db"` | Specify an alternate location for the [trivy-java-db](https://github.com/aquasecurity/trivy-java-db) vulnerability database. |
| `CS_TRIVY_JAVA_DB` | `"registry.gitlab.com/gitlab-org/security-products/dependencies/trivy-java-db"` | Specify an alternate location for the [trivy-java-db](https://github.com/aquasecurity/trivy-java-db) vulnerability database. |
| `SECURE_LOG_LEVEL` | `info` | Set the minimum logging level. Messages of this logging level or higher are output. From highest to lowest severity, the logging levels are: `fatal`, `error`, `warn`, `info`, `debug`. |
| `TRIVY_TIMEOUT` | `5m0s` | Set the timeout for the scan. |

View File

@ -20,6 +20,7 @@ module Gitlab
autoload :GitlabConfig, 'gitlab/backup/cli/gitlab_config'
autoload :Metadata, 'gitlab/backup/cli/metadata'
autoload :Output, 'gitlab/backup/cli/output'
autoload :RepoType, 'gitlab/backup/cli/repo_type'
autoload :RestoreExecutor, 'gitlab/backup/cli/restore_executor'
autoload :Runner, 'gitlab/backup/cli/runner'
autoload :Shell, 'gitlab/backup/cli/shell'

View File

@ -51,20 +51,17 @@ module Gitlab
end
def execute_all_tasks
# TODO: when we migrate targets to the new codebase, recreate options to have only what we need here
# https://gitlab.com/gitlab-org/gitlab/-/issues/454906
options = ::Backup::Options.new(
remote_directory: backup_bucket,
container_registry_bucket: registry_bucket,
service_account_file: service_account_file
)
tasks = []
Gitlab::Backup::Cli::Tasks.build_each(context: context, options: options) do |task|
Gitlab::Backup::Cli::Tasks.build_each(context: context) do |task|
# This is a temporary hack while we move away from options and use config instead
# This hack will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/498455
task.set_registry_bucket(registry_bucket) if task.is_a?(Gitlab::Backup::Cli::Tasks::Registry)
Gitlab::Backup::Cli::Output.info("Executing Backup of #{task.human_name}...")
duration = measure_duration do
task.backup!(workdir, metadata.backup_id)
task.backup!(workdir)
tasks << task
end

View File

@ -104,7 +104,7 @@ module Gitlab
end
def config(object_type)
Gitlab.config[object_type]
gitlab_config[object_type]
end
def env
@ -112,6 +112,18 @@ module Gitlab
ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence || "development")
end
def config_repositories_storages
gitlab_config.dig(env, 'repositories', 'storages')
end
def gitaly_backup_path
gitlab_config.dig(env, 'backup', 'gitaly_backup_path')
end
def gitaly_token
gitlab_config.dig(env, 'gitaly', 'token')
end
private
# Return the shared path used as a fallback base location to each blob type

View File

@ -6,6 +6,8 @@ module Gitlab
module Errors
autoload :DatabaseBackupError, 'gitlab/backup/cli/errors/database_backup_error'
autoload :FileBackupError, 'gitlab/backup/cli/errors/file_backup_error'
autoload :FileRestoreError, 'gitlab/backup/cli/errors/file_restore_error'
autoload :GitalyBackupError, 'gitlab/backup/cli/errors/gitaly_backup_error'
end
end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true

module Gitlab
  module Backup
    module Cli
      module Errors
        # Raised when restoring a file-based backup target fails.
        class FileRestoreError < StandardError
          # Raw error output captured from the failed restore operation.
          attr_reader :error_message

          def initialize(error_message:)
            @error_message = error_message

            super
          end

          # @return [String] human-readable description of the failure
          def message
            format('Restore operation failed: %s', error_message)
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,22 @@
# frozen_string_literal: true
module Gitlab
module Backup
module Cli
module Errors
class GitalyBackupError < StandardError
attr_reader :error_message
def initialize(error_message = '')
super
@error_message = error_message
end
def message
"Repository Backup/Restore failed. #{error_message}"
end
end
end
end
end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
module Gitlab
module Backup
module Cli
class RepoType
PROJECT = :project
WIKI = :wiki
SNIPPET = :snippet
DESIGN = :design
end
end
end
end

View File

@ -41,10 +41,6 @@ module Gitlab
execute_all_tasks
end
def backup_options
@backup_options ||= build_backup_options!
end
def metadata
@metadata ||= read_metadata!
end
@ -57,14 +53,16 @@ module Gitlab
private
def execute_all_tasks
# TODO: when we migrate targets to the new codebase, recreate options to have only what we need here
# https://gitlab.com/gitlab-org/gitlab/-/issues/454906
tasks = []
Gitlab::Backup::Cli::Tasks.build_each(context: context, options: backup_options) do |task|
Gitlab::Backup::Cli::Tasks.build_each(context: context) do |task|
# This is a temporary hack while we move away from options and use config instead
# This hack will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/498455
task.set_registry_bucket(registry_bucket) if task.is_a?(Gitlab::Backup::Cli::Tasks::Registry)
Gitlab::Backup::Cli::Output.info("Executing restoration of #{task.human_name}...")
duration = measure_duration do
tasks << { name: task.human_name, result: task.restore!(archive_directory, backup_id) }
tasks << { name: task.human_name, result: task.restore!(archive_directory) }
end
next if task.object_storage?
@ -87,15 +85,6 @@ module Gitlab
@metadata = Gitlab::Backup::Cli::Metadata::BackupMetadata.load!(archive_directory)
end
def build_backup_options!
::Backup::Options.new(
backup_id: backup_id,
remote_directory: backup_bucket,
container_registry_bucket: registry_bucket,
service_account_file: service_account_file
)
end
# @return [Pathname] temporary directory
def create_temporary_workdir!
# Ensure base directory exists

View File

@ -6,7 +6,11 @@ module Gitlab
module Targets
autoload :Target, 'gitlab/backup/cli/targets/target'
autoload :Database, 'gitlab/backup/cli/targets/database'
autoload :Files, 'gitlab/backup/cli/targets/files'
autoload :ObjectStorage, 'gitlab/backup/cli/targets/object_storage'
autoload :GitalyBackup, 'gitlab/backup/cli/targets/gitaly_backup'
autoload :GitalyClient, 'gitlab/backup/cli/targets/gitaly_client'
autoload :Repositories, 'gitlab/backup/cli/targets/repositories'
end
end
end

View File

@ -17,14 +17,16 @@ module Gitlab
].freeze
IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze
def initialize(options:)
super(options: options)
def initialize
@errors = []
@force = options.force?
# This flag will be removed as part of https://gitlab.com/gitlab-org/gitlab/-/issues/494209
# This option will be reintroduced as part of
# https://gitlab.com/gitlab-org/gitlab/-/issues/498453
@force = false
end
def dump(destination_dir, _)
def dump(destination_dir)
FileUtils.mkdir_p(destination_dir)
each_database(destination_dir) do |backup_connection|
@ -74,7 +76,7 @@ module Gitlab
end
end
def restore(destination_dir, _)
def restore(destination_dir)
@errors = []
base_models_for_backup.each do |database_name, _|

View File

@ -0,0 +1,103 @@
# frozen_string_literal: true
module Gitlab
module Backup
module Cli
module Targets
class Files < Target
DEFAULT_EXCLUDE = ['lost+found'].freeze
attr_reader :excludes
# @param [String] storage_path
# @param [Array] excludes
def initialize(context, storage_path, excludes: [])
super(context)
@storage_path = storage_path
@excludes = excludes
end
def dump(destination)
archive_file = [destination, 'w', 0o600]
tar_command = Utils::Tar.new.pack_from_stdin_cmd(
target_directory: storage_realpath,
target: '.',
excludes: excludes)
compression_cmd = Utils::Compression.compression_command
pipeline = Shell::Pipeline.new(tar_command, compression_cmd)
result = pipeline.run!(output: archive_file)
return if success?(result)
raise Errors::FileBackupError.new(storage_realpath, destination)
end
def restore(source)
# Existing files will be handled in https://gitlab.com/gitlab-org/gitlab/-/issues/499876
if File.exist?(storage_realpath)
Output.warning "Ignoring existing files at #{storage_realpath} and continuing restore."
end
archive_file = source.to_s
tar_command = Utils::Tar.new.extract_from_stdin_cmd(target_directory: storage_realpath)
decompression_cmd = Utils::Compression.decompression_command
pipeline = Shell::Pipeline.new(decompression_cmd, tar_command)
result = pipeline.run!(input: archive_file)
return if success?(result)
raise Errors::FileRestoreError.new(error_message: result.stderr)
end
private
def success?(result)
return true if result.success?
return true if ignore_non_success?(
result.status_list[1].exitstatus,
result.stderr
)
false
end
def noncritical_warning_matcher
/^g?tar: \.: Cannot mkdir: No such file or directory$/
end
def ignore_non_success?(exitstatus, output)
# tar can exit with nonzero code:
# 1 - if some files changed (i.e. a CI job is currently writes to log)
# 2 - if it cannot create `.` directory (see issue https://gitlab.com/gitlab-org/gitlab/-/issues/22442)
# http://www.gnu.org/software/tar/manual/html_section/tar_19.html#Synopsis
# so check tar status 1 or stderr output against some non-critical warnings
if exitstatus == 1
Output.print_info "Ignoring tar exit status 1 'Some files differ': #{output}"
return true
end
# allow tar to fail with other non-success status if output contain non-critical warning
if noncritical_warning_matcher&.match?(output)
Output.print_info(
"Ignoring non-success exit status #{exitstatus} due to output of non-critical warning(s): #{output}")
return true
end
false
end
def storage_realpath
@storage_realpath ||= File.realpath(@storage_path)
end
end
end
end
end
end

View File

@ -0,0 +1,187 @@
# frozen_string_literal: true
module Gitlab
module Backup
module Cli
module Targets
class GitalyBackup
# Backup and restores repositories using gitaly-backup
#
# gitaly-backup can work in parallel and accepts a list of repositories
# through input pipe using a specific json format for both backup and restore
attr_reader :context
def initialize(context)
@context = context
end
def start(type, backup_repos_path, backup_id: nil, remove_all_repositories: nil)
raise Gitlab::Backup::Cli::Errors::GitalyBackupError, 'already started' if started?
FileUtils.rm_rf(backup_repos_path) if type == :create
@input_stream, stdout, @thread = Open3.popen2(
build_env,
bin_path,
*gitaly_backup_args(type, backup_repos_path.to_s, backup_id, remove_all_repositories)
)
@out_reader = Thread.new do
IO.copy_stream(stdout, $stdout)
end
end
def finish!
return unless started?
@input_stream.close
@thread.join
status = @thread.value
@thread = nil
return unless status.exitstatus != 0
raise Gitlab::Backup::Cli::Errors::GitalyBackupError,
"gitaly-backup exit status #{status.exitstatus}"
end
def enqueue(container, repo_type)
raise Gitlab::Backup::Cli::Errors::GitalyBackupError, 'not started' unless started?
raise Gitlab::Backup::Cli::Errors::GitalyBackupError, 'no container for repo type' unless container
storage, relative_path, gl_project_path, always_create = repository_info_for(container, repo_type)
schedule_backup_job(storage, relative_path, gl_project_path, always_create)
end
private
def repository_info_for(container, repo_type)
case repo_type
when RepoType::PROJECT
[container.repository_storage,
container.disk_path || container.full_path,
container.full_path,
true]
when RepoType::WIKI
wiki_repo_info(container)
when RepoType::SNIPPET
[container.repository_storage,
container.disk_path || container.full_path,
container.full_path,
false]
when RepoType::DESIGN
[design_repo_storage(container),
container.project.disk_path,
container.project.full_path,
false]
end
end
def design_repo_storage(container)
return container.repository.repository_storage if container.repository.respond_to?(:repository_storage)
container.repository_storage
end
def wiki_repo_info(container)
wiki = container.respond_to?(:wiki) ? container.wiki : container
[wiki.repository_storage,
wiki.disk_path || wiki.full_path,
wiki.full_path,
false]
end
def gitaly_backup_args(type, backup_repos_path, backup_id, remove_all_repositories)
command = case type
when :create
'create'
when :restore
'restore'
else
raise Gitlab::Backup::Cli::Errors::GitalyBackupError, "unknown backup type: #{type}"
end
args = [command] + ['-path', backup_repos_path, '-layout', 'manifest']
case type
when :create
args += ['-id', backup_id] if backup_id
when :restore
args += ['-remove-all-repositories', remove_all_repositories.join(',')] if remove_all_repositories
args += ['-id', backup_id] if backup_id
end
args
end
# Schedule a new backup job through a non-blocking JSON based pipe protocol
#
# @see https://gitlab.com/gitlab-org/gitaly/-/blob/master/doc/gitaly-backup.md
def schedule_backup_job(storage, relative_path, gl_project_path, always_create)
json_job = {
storage_name: storage,
relative_path: relative_path,
gl_project_path: gl_project_path,
always_create: always_create
}.to_json
@input_stream.puts(json_job)
end
# Connection data for every configured repository storage, keyed by storage name.
#
# @return [Hash{String => Hash}] storage name => { 'address' =>, 'token' => }
# @raise [Gitlab::Backup::Cli::Errors::GitalyBackupError] when no storages are configured
def gitaly_servers
  storages = context.config_repositories_storages

  # BUG FIX: the previous guard was `unless storages.keys`, but Hash#keys
  # returns an Array, which is truthy even when empty -- the error could
  # never be raised. Check for a missing or empty configuration instead.
  if storages.nil? || storages.empty?
    raise Gitlab::Backup::Cli::Errors::GitalyBackupError,
      "No repositories' storages found."
  end

  storages.keys.index_with do |storage_name|
    GitalyClient.new(storages, context.gitaly_token).connection_data(storage_name)
  end
end
# Gitaly server map serialized to JSON and Base64-encoded for the
# GITALY_SERVERS environment variable.
def gitaly_servers_encoded
  payload = JSON.dump(gitaly_servers)

  Base64.strict_encode64(payload)
end
# These variables will be moved to a config file via
# https://gitlab.com/gitlab-org/gitlab/-/issues/500437
#
# CA certificate directory: environment override, else the OpenSSL default.
def default_cert_dir
  ENV.fetch('SSL_CERT_DIR') { OpenSSL::X509::DEFAULT_CERT_DIR }
end
# CA certificate bundle: environment override, else the OpenSSL default.
def default_cert_file
  ENV.fetch('SSL_CERT_FILE') { OpenSSL::X509::DEFAULT_CERT_FILE }
end
# Environment for the spawned gitaly-backup process.
#
# Variables already set in the current process environment take precedence
# over the computed SSL/GITALY_SERVERS defaults (they are merged last).
def build_env
  defaults = {
    'SSL_CERT_FILE' => default_cert_file,
    'SSL_CERT_DIR' => default_cert_dir,
    'GITALY_SERVERS' => gitaly_servers_encoded
  }

  defaults.merge(current_env)
end
# Environment of the current process; merged last in #build_env so that
# explicitly-set variables win over the computed defaults. Extracted as a
# method so specs can stub it.
def current_env
  ENV
end
# Whether the gitaly-backup process thread has been spawned (set by #start).
def started?
  !@thread.nil?
end
# Absolute path to the gitaly-backup binary.
#
# @raise [Gitlab::Backup::Cli::Errors::GitalyBackupError] when gitaly_backup_path
#   is unset or blank in the context configuration
def bin_path
  configured_path = context.gitaly_backup_path

  unless configured_path.present?
    raise Gitlab::Backup::Cli::Errors::GitalyBackupError,
      'gitaly-backup binary not found and gitaly_backup_path is not configured'
  end

  File.absolute_path(configured_path)
end
end
end
end
end
end

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true
module Gitlab
module Backup
module Cli
module Targets
# Resolves per-storage Gitaly connection details (address + token) from the
# configured storages map.
class GitalyClient
  attr_reader :storages, :gitaly_token

  # @param [Hash] storages map of storage name => storage settings
  # @param [String, nil] gitaly_token shared fallback token for all storages
  def initialize(storages, gitaly_token)
    @storages = storages
    @gitaly_token = gitaly_token
  end

  # Connection information for a single storage, as consumed by gitaly-backup.
  #
  # @raise [RuntimeError] when the storage is not configured
  def connection_data(storage)
    raise "storage not found: #{storage.inspect}" if storages[storage].nil?

    { 'address' => address(storage), 'token' => token(storage) }
  end

  private

  # Validated gitaly_address for the storage; only tcp/unix/tls/dns schemes
  # are accepted.
  def address(storage)
    configured = storages[storage]['gitaly_address']
    raise "storage #{storage.inspect} is missing a gitaly_address" unless configured.present?

    scheme = URI(configured).scheme
    unless %w[tcp unix tls dns].include?(scheme)
      raise "Unsupported Gitaly address: " \
        "#{configured.inspect} does not use URL scheme 'tcp' or 'unix' or 'tls' or 'dns'"
    end

    configured
  end

  # Storage-specific token when configured, else the shared fallback token.
  def token(storage)
    storages[storage]['gitaly_token'].presence || gitaly_token
  end
end
end
end
end
end

View File

@ -12,14 +12,15 @@ module Gitlab
attr_accessor :object_type, :backup_bucket, :client, :config, :results
def initialize(object_type, options, config)
def initialize(object_type, remote_directory, config)
@object_type = object_type
@backup_bucket = options.remote_directory
@backup_bucket = remote_directory
@config = config
@client = ::Google::Cloud::StorageTransfer.storage_transfer_service
end
def dump(_, backup_id)
# @param [String] backup_id unique identifier for the backup
def dump(backup_id)
response = find_or_create_job(backup_id, "backup")
run_request = {
project_id: backup_job_spec(backup_id)[:project_id],
@ -28,7 +29,8 @@ module Gitlab
@results = client.run_transfer_job run_request
end
def restore(_, backup_id)
# @param [String] backup_id unique identifier for the backup
def restore(backup_id)
response = find_or_create_job(backup_id, "restore")
run_request = {
project_id: restore_job_spec(backup_id)[:project_id],

View File

@ -0,0 +1,96 @@
# frozen_string_literal: true
require 'yaml'
module Gitlab
module Backup
module Cli
module Targets
# Backup and restores repositories by querying the database
class Repositories < Target
  # Create a backup of every repository, streaming enqueue jobs to the
  # gitaly-backup strategy; the strategy is always finalized, even on error.
  #
  # @param [String] destination where repository backups are written
  def dump(destination)
    strategy.start(:create, destination)
    enqueue_consecutive
  ensure
    strategy.finish!
  end

  # Restore every repository from a previous backup, wiping all configured
  # storages first, then rescheduling object pools.
  #
  # @param [String] source where repository backups are read from
  def restore(source)
    strategy.start(:restore, source, remove_all_repositories: remove_all_repositories)
    enqueue_consecutive
  ensure
    strategy.finish!

    restore_object_pools
  end

  # Lazily-built gitaly-backup based strategy shared by dump and restore.
  def strategy
    @strategy ||= GitalyBackup.new(context)
  end

  private

  # Names of all configured storages; repositories there are removed before restore.
  def remove_all_repositories
    context.config_repositories_storages.keys
  end

  def enqueue_consecutive
    enqueue_consecutive_projects
    enqueue_consecutive_snippets
  end

  # Batched to avoid loading every project into memory at once.
  def enqueue_consecutive_projects
    project_relation.find_each(batch_size: 1000) { |project| enqueue_project(project) }
  end

  def enqueue_consecutive_snippets
    snippet_relation.find_each(batch_size: 1000) do |snippet|
      enqueue_snippet(snippet)
    end
  end

  # Enqueue a project's repository, its wiki, and (when present) its design repository.
  def enqueue_project(project)
    strategy.enqueue(project, Gitlab::Backup::Cli::RepoType::PROJECT)
    strategy.enqueue(project, Gitlab::Backup::Cli::RepoType::WIKI)

    design_repository = project.design_management_repository
    strategy.enqueue(design_repository, Gitlab::Backup::Cli::RepoType::DESIGN) if design_repository
  end

  def enqueue_snippet(snippet)
    strategy.enqueue(snippet, Gitlab::Backup::Cli::RepoType::SNIPPET)
  end

  # Eager-loads associations used while enqueueing to avoid N+1 queries.
  def project_relation
    Project.includes(:route, :group, :namespace)
  end

  def snippet_relation
    Snippet.all
  end

  # Reset and reschedule each object pool after restore; pools without a
  # source project are reported and skipped.
  def restore_object_pools
    PoolRepository.includes(:source_project).find_each do |pool|
      Output.info " - Object pool #{pool.disk_path}..."

      unless pool.source_project
        Output.info " - Object pool #{pool.disk_path}... [SKIPPED]"
        next
      end

      pool.state = 'none'
      pool.save

      pool.schedule
    end
  end
end
end
end
end
end

View File

@ -6,14 +6,10 @@ module Gitlab
module Targets
# Abstract class used to implement a Backup Target
class Target
# Backup creation and restore option flags
#
# TODO: Migrate to a unified backup specific Options implementation
# @return [::Backup::Options]
attr_reader :options
attr_reader :context
def initialize(options:)
@options = options
def initialize(context = nil)
@context = context
end
def asynchronous?
@ -23,13 +19,12 @@ module Gitlab
# dump task backup to `path`
#
# @param [String] path fully qualified backup task destination
# @param [String] backup_id unique identifier for the backup
def dump(path, backup_id)
def dump(path)
raise NotImplementedError
end
# restore task backup from `path`
def restore(path, backup_id)
def restore(path)
raise NotImplementedError
end
end

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.ci_job_artifacts_path

View File

@ -14,7 +14,7 @@ module Gitlab
private
def target
::Backup::Targets::Files.new(nil, storage_path, options: options)
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path)
end
def storage_path = context.ci_builds_path

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.ci_secure_files_path

View File

@ -16,7 +16,7 @@ module Gitlab
private
def target
::Gitlab::Backup::Cli::Targets::Database.new(options: options)
::Gitlab::Backup::Cli::Targets::Database.new
end
end
end

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options)
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path)
end
def storage_path = context.ci_lfs_path

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.packages_path

View File

@ -18,7 +18,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: [LEGACY_PAGES_TMP_PATH])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: [LEGACY_PAGES_TMP_PATH])
end
def storage_path = context.pages_path

View File

@ -13,16 +13,22 @@ module Gitlab
def destination_path = 'registry.tar.gz'
attr_reader :registry_bucket
def set_registry_bucket(registry_bucket)
@registry_bucket = registry_bucket
end
def object_storage?
!options.container_registry_bucket.nil?
!registry_bucket.nil?
end
# Registry does not use consolidated object storage config.
def config
settings = {
object_store: {
connection: context.config('object_store').connection.to_hash,
remote_directory: options.container_registry_bucket
connection: context.gitlab_config('object_store').connection.to_hash,
remote_directory: registry_bucket
}
}
GitlabSettings::Options.build(settings)
@ -31,7 +37,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options)
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path)
end
def storage_path = context.registry_path

View File

@ -16,24 +16,7 @@ module Gitlab
private
def target
# TODO: migrate to the new codebase and rewrite portions to format output in a readable way
::Backup::Targets::Repositories.new($stdout,
strategy: gitaly_strategy,
options: options,
storages: options.repositories_storages,
paths: options.repositories_paths,
skip_paths: options.skip_repositories_paths
)
end
def gitaly_strategy
# TODO: migrate to the new codebase and rewrite portions to format output in a readable way
::Backup::GitalyBackup.new($stdout,
incremental: options.incremental?,
max_parallelism: options.max_parallelism,
storage_parallelism: options.max_storage_parallelism,
server_side: false
)
Gitlab::Backup::Cli::Targets::Repositories.new(context)
end
end
end

View File

@ -5,36 +5,34 @@ module Gitlab
module Cli
module Tasks
class Task
attr_reader :options, :context
attr_writer :target
attr_reader :context
def initialize(context:)
@context = context
end
# Identifier used as parameter in the CLI to skip from executing
def self.id
raise NotImplementedError
end
def initialize(context:, options:)
@context = context
@options = options
end
# Initiate a backup
#
# @param [Pathname] backup_path a path where to store the backups
# @param [String] backup_id
def backup!(backup_path, backup_id)
def backup!(backup_path)
backup_output = backup_path.join(destination_path)
# During test, we ensure storage exists so we can run against `RAILS_ENV=test` environment
FileUtils.mkdir_p(storage_path) if context.env.test? && respond_to?(:storage_path, true)
FileUtils.mkdir_p(storage_path) if context&.env&.test? && respond_to?(:storage_path, true)
target.dump(backup_output, backup_id)
target.dump(backup_output)
end
def restore!(archive_directory, backup_id)
def restore!(archive_directory)
archived_data_location = Pathname(archive_directory).join(destination_path)
target.restore(archived_data_location, backup_id)
target.restore(archived_data_location)
end
# Key string that identifies the task
@ -70,7 +68,10 @@ module Gitlab
end
def config
context.config(id)
return context.config(id) if context
Output.warning("No context passed to derive configuration from.")
nil
end
def object_storage?

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.terraform_state_path

View File

@ -14,7 +14,7 @@ module Gitlab
private
def local
::Backup::Targets::Files.new(nil, storage_path, options: options, excludes: ['tmp'])
Gitlab::Backup::Cli::Targets::Files.new(context, storage_path, excludes: ['tmp'])
end
def storage_path = context.upload_path

View File

@ -49,6 +49,14 @@ module Gitlab
Shell::Command.new(cmd, *tar_args)
end
def pack_from_stdin_cmd(target_directory:, target:, excludes: [])
pack_cmd(
archive_file: '-', # use stdin as list of files
target_directory: target_directory,
target: target,
excludes: excludes)
end
# @param [Object] archive_file
# @param [Object] target_directory
# @return [Gitlab::Backup::Cli::Shell::Command]
@ -64,6 +72,11 @@ module Gitlab
Shell::Command.new(cmd, *tar_args)
end
def extract_from_stdin_cmd(target_directory:)
extract_cmd(archive_file: '-', # use stdin as file source content
target_directory: target_directory)
end
private
def build_exclude_patterns(*patterns)

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.describe Gitlab::Backup::Cli::GitlabConfig do
let(:config_fixture) { fixtures_path.join('gitlab.yml') }
let(:config_fixture) { fixtures_path.join('config/gitlab.yml') }
subject(:gitlab_config) { described_class.new(config_fixture) }

View File

@ -0,0 +1,188 @@
# frozen_string_literal: true
require 'spec_helper'
require 'active_support/testing/time_helpers'
RSpec.describe Gitlab::Backup::Cli::Targets::Files, feature_category: :backup_restore do
  include ActiveSupport::Testing::TimeHelpers

  # Doubles emulating the exit statuses `tar` can produce: 0 = ok, 1 = files
  # changed while archiving, 2 = fatal (unless stderr matches a known warning).
  let(:status_0) { instance_double(Process::Status, success?: true, exitstatus: 0) }
  let(:status_1) { instance_double(Process::Status, success?: false, exitstatus: 1) }
  let(:status_2) { instance_double(Process::Status, success?: false, exitstatus: 2) }

  let(:pipeline_status_failed) do
    Gitlab::Backup::Cli::Shell::Pipeline::Result.new(stderr: 'Cannot mkdir', status_list: [status_1, status_0])
  end

  let(:tmp_backup_restore_dir) { Dir.mktmpdir('files-target-restore') }

  let(:destination) { 'registry.tar.gz' }

  let(:context) { Gitlab::Backup::Cli::Context.build }

  # Eagerly created so the backup base directory exists before each example.
  let!(:workdir) do
    FileUtils.mkdir_p(context.backup_basedir)
    Pathname(Dir.mktmpdir('backup', context.backup_basedir))
  end

  let(:restore_target) { File.realpath(tmp_backup_restore_dir) }

  # Seeds the temp dir with folders that should be excludable from archives.
  let(:backup_target) do
    %w[@pages.tmp lost+found @hashed].each do |folder|
      path = Pathname(tmp_backup_restore_dir).join(folder, 'something', 'else')

      FileUtils.mkdir_p(path)
      FileUtils.touch(path.join('artifacts.zip'))
    end

    File.realpath(tmp_backup_restore_dir)
  end

  before do
    allow(FileUtils).to receive(:mv).and_return(true)
    allow(File).to receive(:exist?).and_return(true)
  end

  after do
    FileUtils.rm_rf([restore_target, backup_target, destination], secure: true)
  end

  describe '#dump' do
    subject(:files) do
      described_class.new(context, backup_target, excludes: ['@pages.tmp'])
    end

    it 'raises no errors' do
      expect { files.dump(destination) }.not_to raise_error
    end

    it 'excludes tmp dirs from archive' do
      expect_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline) do |pipeline|
        tar_cmd = pipeline.shell_commands[0]

        expect(tar_cmd.cmd_args).to include('--exclude=lost+found')
        expect(tar_cmd.cmd_args).to include('--exclude=./@pages.tmp')

        allow(pipeline).to receive(:run!).and_call_original
      end

      files.dump(destination)
    end

    it 'raises an error on failure' do
      expect_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline::Result) do |result|
        expect(result).to receive(:success?).and_return(false)
      end

      expect do
        files.dump(destination)
      end.to raise_error(/Failed to create compressed file/)
    end
  end

  describe '#restore' do
    let(:source) { File.join(restore_target, 'backup.tar.gz') }
    let(:pipeline) { Gitlab::Backup::Cli::Shell::Pipeline.new(Gitlab::Backup::Cli::Shell::Command.new('echo 0')) }

    subject(:files) { described_class.new(context, restore_target) }

    before do
      FileUtils.touch(source)
      allow(Gitlab::Backup::Cli::Shell::Pipeline).to receive(:new).and_return(pipeline)
    end

    context 'when storage path exists' do
      before do
        allow(File).to receive(:exist?).with(restore_target).and_return(true)
      end

      it 'logs a warning about existing files' do
        expect(Gitlab::Backup::Cli::Output).to receive(:warning).with(/Ignoring existing files/)

        files.restore(source)
      end
    end

    context 'when pipeline execution is successful' do
      before do
        allow_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline::Result) do |result|
          allow(result).to receive(:success?).and_return(true)
        end
      end

      it 'does not raise an error' do
        expect { files.restore(source) }.not_to raise_error
      end
    end

    context 'when pipeline execution fails' do
      before do
        allow(files).to receive(:dump).and_return(true)
        allow_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline) do |pipeline|
          allow(pipeline).to receive(:run!).and_return(pipeline_status_failed)
        end
      end

      it 'raises a FileRestoreError' do
        expect { files.restore(source) }.to raise_error(Gitlab::Backup::Cli::Errors::FileRestoreError)
      end
    end

    # `tar` exit status 2 with a recognized stderr message is treated as a
    # non-critical warning rather than a failure.
    context 'when pipeline execution has non-critical warnings' do
      let(:warning_message) { 'tar: .: Cannot mkdir: No such file or directory' }

      before do
        allow_next_instance_of(Gitlab::Backup::Cli::Shell::Pipeline::Result) do |result|
          allow(result).to receive(:success?).and_return(false)
          allow(result).to receive(:stderr).and_return(warning_message)
          allow(result).to receive(:status_list).and_return([status_0, status_2])
        end
      end

      it 'does not raise an error' do
        expect { files.restore(source) }.not_to raise_error
      end
    end
  end

  describe '#ignore_non_success?' do
    subject(:files) do
      described_class.new(context, '/var/gitlab-registry')
    end

    context 'if `tar` command exits with 1 exitstatus' do
      it 'returns true' do
        expect(
          files.send(:ignore_non_success?, 1, nil)
        ).to be_truthy
      end

      it 'outputs a warning' do
        expect do
          files.send(:ignore_non_success?, 1, nil)
        end.to output(/Ignoring tar exit status 1/).to_stdout
      end
    end

    context 'if `tar` command exits with 2 exitstatus with non-critical warning' do
      it 'returns true' do
        expect(
          files.send(:ignore_non_success?, 2, 'gtar: .: Cannot mkdir: No such file or directory')
        ).to be_truthy
      end

      it 'outputs a warning' do
        expect do
          files.send(:ignore_non_success?, 2, 'gtar: .: Cannot mkdir: No such file or directory')
        end.to output(/Ignoring non-success exit status/).to_stdout
      end
    end

    context 'if `tar` command exits with any other unlisted error' do
      it 'returns false' do
        expect(
          files.send(:ignore_non_success?, 2, 'unlisted_error')
        ).to be_falsey
      end
    end
  end
end

View File

@ -0,0 +1,199 @@
# frozen_string_literal: true
require 'spec_helper'
require 'open3'
RSpec.describe Gitlab::Backup::Cli::Targets::GitalyBackup do
  let(:context) { Gitlab::Backup::Cli::Context.build }
  let(:gitaly_backup) { described_class.new(context) }

  describe '#start' do
    context 'when creating a backup' do
      it 'starts the gitaly-backup process with the correct arguments' do
        backup_repos_path = '/path/to/backup/repos'
        backup_id = 'abc123'
        expected_args = ['create', '-path', backup_repos_path, '-layout', 'manifest', '-id', backup_id]

        # First popen2 argument is the spawn environment hash, second the binary path.
        expect(Open3).to receive(:popen2).with(instance_of(Hash), instance_of(String), *expected_args)

        gitaly_backup.start(:create, backup_repos_path, backup_id: backup_id)
      end
    end

    context 'when restoring a backup' do
      it 'starts the gitaly-backup process with the correct arguments' do
        backup_repos_path = '/path/to/backup/repos'
        backup_id = 'abc123'
        remove_all_repositories = %w[repo1 repo2]
        expected_args = ['restore', '-path', backup_repos_path, '-layout', 'manifest', '-remove-all-repositories',
          'repo1,repo2', '-id', backup_id]

        expect(Open3).to receive(:popen2).with(instance_of(Hash), instance_of(String), *expected_args)

        gitaly_backup.start(:restore, backup_repos_path, backup_id: backup_id,
          remove_all_repositories: remove_all_repositories)
      end
    end

    context 'when an invalid type is provided' do
      it 'raises an error' do
        expect do
          gitaly_backup.start(:invalid,
            '/path/to/backup/repos')
        end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /unknown backup type: invalid/)
      end
    end

    context 'when already started' do
      it 'raises an error' do
        # @thread presence is the "started" marker; set it directly to simulate.
        gitaly_backup.instance_variable_set(:@thread, Thread.new { true })

        expect do
          gitaly_backup.start(:create,
            '/path/to/backup/repos')
        end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /already started/)
      end
    end
  end

  describe '#finish!' do
    context 'when not started' do
      it 'returns without raising an error' do
        expect { gitaly_backup.finish! }.not_to raise_error
      end
    end

    context 'when started' do
      let(:thread) { instance_double('Thread', join: nil, value: instance_double(Process::Status, exitstatus: 0)) }

      before do
        gitaly_backup.instance_variable_set(:@thread, thread)
        gitaly_backup.instance_variable_set(:@input_stream, instance_double('InputStream', close: nil))
      end

      it 'closes the input stream and joins the thread' do
        input_stream = gitaly_backup.instance_variable_get(:@input_stream)

        expect(input_stream).to receive(:close)
        expect(thread).to receive(:join)

        gitaly_backup.finish!
      end

      context 'when the process exits with a non-zero status' do
        let(:thread) { instance_double('Thread', join: nil, value: instance_double(Process::Status, exitstatus: 1)) }

        it 'raises an error' do
          expect do
            gitaly_backup.finish!
          end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /gitaly-backup exit status 1/)
        end
      end
    end
  end

  describe '#enqueue' do
    context 'when not started' do
      it 'raises an error' do
        expect do
          gitaly_backup.enqueue(double, :project)
        end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /not started/)
      end
    end

    context 'when started' do
      let(:input_stream) { instance_double('InputStream', puts: nil) }

      before do
        gitaly_backup.instance_variable_set(:@input_stream, input_stream)
        gitaly_backup.instance_variable_set(:@thread, Thread.new { true })
      end

      # Each container type maps to one JSON job line on the pipe; only
      # project repositories set always_create to true.
      context 'with a project repository' do
        let(:container) do
          instance_double('Project', repository_storage: 'storage', disk_path: 'disk/path', full_path: 'group/project')
        end

        it 'schedules a backup job with the correct parameters' do
          expected_json = {
            storage_name: 'storage',
            relative_path: 'disk/path',
            gl_project_path: 'group/project',
            always_create: true
          }.to_json

          expect(input_stream).to receive(:puts).with(expected_json)

          gitaly_backup.enqueue(container, :project)
        end
      end

      context 'with a wiki repository' do
        let(:wiki) do
          instance_double('Wiki', repository_storage: 'wiki_storage', disk_path: 'wiki/disk/path',
            full_path: 'group/project.wiki')
        end

        let(:container) { instance_double('Project', wiki: wiki) }

        it 'schedules a backup job with the correct parameters' do
          expected_json = {
            storage_name: 'wiki_storage',
            relative_path: 'wiki/disk/path',
            gl_project_path: 'group/project.wiki',
            always_create: false
          }.to_json

          expect(input_stream).to receive(:puts).with(expected_json)

          gitaly_backup.enqueue(container, :wiki)
        end
      end

      context 'with a snippet repository' do
        let(:container) do
          instance_double('Snippet', repository_storage: 'storage', disk_path: 'disk/path', full_path: 'snippets/1')
        end

        it 'schedules a backup job with the correct parameters' do
          expected_json = {
            storage_name: 'storage',
            relative_path: 'disk/path',
            gl_project_path: 'snippets/1',
            always_create: false
          }.to_json

          expect(input_stream).to receive(:puts).with(expected_json)

          gitaly_backup.enqueue(container, :snippet)
        end
      end

      context 'with a design repository' do
        let(:project) { instance_double('Project', disk_path: 'disk/path', full_path: 'group/project') }

        let(:container) do
          instance_double('DesignRepository', project: project,
            repository: instance_double('Repository', repository_storage: 'storage'))
        end

        it 'schedules a backup job with the correct parameters' do
          expected_json = {
            storage_name: 'storage',
            relative_path: 'disk/path',
            gl_project_path: 'group/project',
            always_create: false
          }.to_json

          expect(input_stream).to receive(:puts).with(expected_json)

          gitaly_backup.enqueue(container, :design)
        end
      end

      context 'with an invalid repository type' do
        it 'raises an error' do
          expect do
            gitaly_backup.enqueue(nil,
              :invalid)
          end.to raise_error(Gitlab::Backup::Cli::Errors::GitalyBackupError, /no container for repo type/)
        end
      end
    end
  end
end

View File

@ -55,17 +55,15 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
}
end
let(:backup_options) { instance_double("::Backup::Options", remote_directory: 'fake_backup_bucket') }
before do
allow(Gitlab).to receive(:config).and_return(gitlab_config)
allow(::Google::Cloud::StorageTransfer).to receive(:storage_transfer_service).and_return(client)
allow(gitlab_config).to receive(:[]).with('fake_object').and_return(supported_config)
end
subject(:object_storage) { described_class.new("fake_object", backup_options, supported_config) }
subject(:object_storage) { described_class.new("fake_object", 'fake_backup_bucket', supported_config) }
describe "#dump" do
describe "#dump", :silence_output do
context "when job exists" do
before do
allow(client).to receive(:get_transfer_job).and_return(backup_transfer_job)
@ -79,7 +77,7 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
transfer_job: updated_spec
)
expect(client).to receive(:run_transfer_job).with({ job_name: "fake_transfer_job", project_id: "fake_project" })
object_storage.dump(nil, 12345)
object_storage.dump(12345)
end
end
@ -94,12 +92,12 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
it "creates a new job" do
expect(client).to receive(:create_transfer_job)
.with(transfer_job: new_backup_transfer_job_spec).and_return(backup_transfer_job)
object_storage.dump(nil, 12345)
object_storage.dump(12345)
end
end
end
describe "#restore" do
describe "#restore", :silence_output do
context "when job exists" do
before do
allow(client).to receive(:get_transfer_job).and_return(restore_transfer_job)
@ -113,7 +111,7 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
transfer_job: updated_spec
)
expect(client).to receive(:run_transfer_job).with({ job_name: "fake_transfer_job", project_id: "fake_project" })
object_storage.restore(nil, 12345)
object_storage.restore(12345)
end
end
@ -128,7 +126,7 @@ RSpec.describe Gitlab::Backup::Cli::Targets::ObjectStorage::Google do
it "creates a new job" do
expect(client).to receive(:create_transfer_job)
.with(transfer_job: new_restore_transfer_job_spec).and_return(restore_transfer_job)
object_storage.restore(nil, 12345)
object_storage.restore(12345)
end
end
end

View File

@ -0,0 +1,74 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Backup::Cli::Targets::Repositories do
  let(:context) { Gitlab::Backup::Cli::Context.build }
  let(:strategy) { repo_target.strategy }

  subject(:repo_target) { described_class.new(context) }

  describe '#dump' do
    it 'starts and finishes the strategy' do
      expect(strategy).to receive(:start).with(:create, '/path/to/destination')
      expect(repo_target).to receive(:enqueue_consecutive)
      expect(strategy).to receive(:finish!)

      repo_target.dump('/path/to/destination')
    end
  end

  describe '#restore' do
    it 'starts and finishes the strategy' do
      # ["default"] comes from the configured repository storages in the fixture context.
      expect(strategy).to receive(:start).with(:restore, '/path/to/destination', remove_all_repositories: ["default"])
      expect(repo_target).to receive(:enqueue_consecutive)
      expect(strategy).to receive(:finish!)
      expect(repo_target).to receive(:restore_object_pools)

      repo_target.restore('/path/to/destination')
    end
  end

  describe '#enqueue_consecutive' do
    it 'calls enqueue_consecutive_projects and enqueue_consecutive_snippets' do
      expect(repo_target).to receive(:enqueue_consecutive_projects)
      expect(repo_target).to receive(:enqueue_consecutive_snippets)

      repo_target.send(:enqueue_consecutive)
    end
  end

  describe '#enqueue_project' do
    let(:project) { instance_double('Project', design_management_repository: nil) }

    it 'enqueues project and wiki' do
      expect(strategy).to receive(:enqueue).with(project, Gitlab::Backup::Cli::RepoType::PROJECT)
      expect(strategy).to receive(:enqueue).with(project, Gitlab::Backup::Cli::RepoType::WIKI)

      repo_target.send(:enqueue_project, project)
    end

    context 'when project has design management repository' do
      let(:design_repo) { instance_double('DesignRepository') }
      let(:project) { instance_double('Project', design_management_repository: design_repo) }

      it 'enqueues project, wiki, and design' do
        expect(strategy).to receive(:enqueue).with(project, Gitlab::Backup::Cli::RepoType::PROJECT)
        expect(strategy).to receive(:enqueue).with(project, Gitlab::Backup::Cli::RepoType::WIKI)
        expect(strategy).to receive(:enqueue).with(design_repo, Gitlab::Backup::Cli::RepoType::DESIGN)

        repo_target.send(:enqueue_project, project)
      end
    end
  end

  describe '#enqueue_snippet' do
    let(:snippet) { instance_double('Snippet') }

    it 'enqueues the snippet' do
      expect(strategy).to receive(:enqueue).with(snippet, Gitlab::Backup::Cli::RepoType::SNIPPET)

      repo_target.send(:enqueue_snippet, snippet)
    end
  end
end

View File

@ -1,12 +1,11 @@
# frozen_string_literal: true
RSpec.describe Gitlab::Backup::Cli::Tasks::Task do
let(:options) { instance_double("::Backup::Option", backup_id: "abc123") }
let(:context) { build_fake_context }
let(:tmpdir) { Pathname.new(Dir.mktmpdir('task', temp_path)) }
let(:metadata) { build(:backup_metadata) }
subject(:task) { described_class.new(options: options, context: context) }
subject(:task) { described_class.new(context: context) }
after do
FileUtils.rmtree(tmpdir)
@ -37,9 +36,9 @@ RSpec.describe Gitlab::Backup::Cli::Tasks::Task do
end
end
describe '#target' do
describe '#local' do
it 'raises an error' do
expect { task.send(:target) }.to raise_error(NotImplementedError)
expect { task.send(:local) }.to raise_error(NotImplementedError)
end
end
end
@ -49,7 +48,7 @@ RSpec.describe Gitlab::Backup::Cli::Tasks::Task do
expect(task).to receive(:destination_path).and_return(tmpdir.join('test_task'))
expect(task).to receive_message_chain(:target, :dump)
task.backup!(tmpdir, metadata.backup_id)
task.backup!(tmpdir)
end
end
@ -59,7 +58,7 @@ RSpec.describe Gitlab::Backup::Cli::Tasks::Task do
expect(task).to receive(:destination_path).and_return(tmpdir.join('test_task'))
expect(task).to receive_message_chain(:target, :restore)
task.restore!(archive_directory, options.backup_id)
task.restore!(archive_directory)
end
end
end

View File

@ -20,9 +20,13 @@ RSpec.describe Gitlab::Backup::Cli::Utils::Tar do
target_basepath = tempdir
target = tempdir.join('*')
result = nil
expect do
tar.pack_cmd(archive_file: archive_file, target_directory: target_basepath, target: target)
result = tar.pack_cmd(archive_file: archive_file, target_directory: target_basepath, target: target)
end.not_to raise_exception
expect(result).to be_a(Gitlab::Backup::Cli::Shell::Command)
end
end
@ -83,4 +87,51 @@ RSpec.describe Gitlab::Backup::Cli::Utils::Tar do
end
end
end
describe '#pack_from_stdin_cmd' do
it 'delegates parameters to pack_cmd passing archive_files: as -' do
tar_tempdir do |tempdir|
target_basepath = tempdir
target = tempdir.join('*')
excludes = ['lost+found']
expect(tar).to receive(:pack_cmd).with(
archive_file: '-',
target_directory: target_basepath,
target: target,
excludes: excludes)
tar.pack_from_stdin_cmd(target_directory: target_basepath, target: target, excludes: excludes)
end
end
end
describe '#extract_cmd' do
it 'instantiate a Shell::Command with default required params' do
tar_tempdir do |tempdir|
archive_file = tempdir.join('testarchive.tar')
target_basepath = tempdir
result = nil
expect do
result = tar.extract_cmd(archive_file: archive_file, target_directory: target_basepath)
end.not_to raise_exception
expect(result).to be_a(Gitlab::Backup::Cli::Shell::Command)
end
end
end
describe 'extract_from_stdin_cmd' do
it 'delegates parameters to extract_cmd passing archive_files: as -' do
tar_tempdir do |tempdir|
target_basepath = tempdir
expect(tar).to receive(:extract_cmd).with(archive_file: '-', target_directory: target_basepath)
tar.extract_from_stdin_cmd(target_directory: target_basepath)
end
end
end
end

View File

@ -7,6 +7,7 @@ require 'thor'
require 'gitlab/rspec/next_instance_of'
ENV["RAILS_ENV"] ||= "test"
GITLAB_PATH = File.expand_path(File.join(__dir__, '/fixtures/'))
# Load spec support code
Dir['spec/support/**/*.rb'].each { |f| load f }

View File

@ -24,7 +24,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
describe '#backup_basedir' do
context 'with a relative path configured in gitlab.yml' do
it 'returns a full path based on gitlab basepath' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.backup_basedir).to eq(fake_gitlab_basepath.join('tmp/tests/backups'))
end
@ -58,7 +58,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.ci_builds_path).to eq(Pathname('/tmp/gitlab/full/builds'))
end
@ -84,7 +84,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.ci_job_artifacts_path).to eq(Pathname('/tmp/gitlab/full/artifacts'))
end
@ -110,7 +110,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.ci_secure_files_path).to eq(Pathname('/tmp/gitlab/full/ci_secure_files'))
end
@ -136,7 +136,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.ci_lfs_path).to eq(Pathname('/tmp/gitlab/full/lfs-objects'))
end
@ -162,7 +162,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.packages_path).to eq(Pathname('/tmp/gitlab/full/packages'))
end
@ -188,7 +188,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.pages_path).to eq(Pathname('/tmp/gitlab/full/pages'))
end
@ -214,7 +214,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.registry_path).to eq(Pathname('/tmp/gitlab/full/registry'))
end
@ -240,7 +240,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.terraform_state_path).to eq(Pathname('/tmp/gitlab/full/terraform_state'))
end
@ -266,7 +266,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.upload_path).to eq(Pathname('/tmp/gitlab/full/public/uploads'))
end
@ -292,7 +292,7 @@ RSpec.shared_examples "context exposing all common configuration methods" do
context 'with a full path configured in gitlab.yml' do
it 'returns a full path as configured in gitlab.yml' do
use_gitlab_config_fixture('gitlab.yml')
use_gitlab_config_fixture('config/gitlab.yml')
expect(context.send(:gitlab_shared_path)).to eq(Pathname('/tmp/gitlab/full/shared'))
end

View File

@ -26631,9 +26631,6 @@ msgstr ""
msgid "GroupSelect|Select a group"
msgstr ""
msgid "GroupSettings|'%{name}' has been scheduled for removal on %{date}."
msgstr ""
msgid "GroupSettings|After the instance reaches the user cap, any user who is added or requests access must be approved by an administrator. Leave empty for an unlimited user cap. If you change the user cap to unlimited, you must re-enable %{project_sharing_docs_link_start}project sharing%{link_end} and %{group_sharing_docs_link_start}group sharing%{link_end}."
msgstr ""
@ -42584,9 +42581,6 @@ msgstr ""
msgid "Project '%{project_name}' was successfully updated."
msgstr ""
msgid "Project '%{project_name}' will be deleted on %{date}"
msgstr ""
msgid "Project Badges"
msgstr ""
@ -53434,6 +53428,9 @@ msgstr ""
msgid "Storage nodes for new repositories"
msgstr ""
msgid "Storage size"
msgstr ""
msgid "Storage:"
msgstr ""

View File

@ -6,8 +6,8 @@ module QA
module Overview
module Groups
class Index < QA::Page::Base
view 'app/views/admin/groups/index.html.haml' do
element 'group-search-field', required: true
view 'app/assets/javascripts/admin/groups/components/filtered_search_and_sort.vue' do
element 'admin-groups-filtered-search-and-sort', required: true
end
view 'app/views/admin/groups/_group.html.haml' do
@ -16,7 +16,9 @@ module QA
end
def search_group(group_name)
find_element('group-search-field').set(group_name).send_keys(:return)
within_element('admin-groups-filtered-search-and-sort') do
find_element('filtered-search-term-input').set(group_name).send_keys(:return)
end
end
def click_group(group_name)

View File

View File

@ -25,37 +25,6 @@ RSpec.describe Admin::GroupsController, feature_category: :groups_and_projects d
expect(assigns(:groups)).to match_array([group, group_2, group_3])
end
it 'renders a correct list of sort by options' do
get :index
html_rendered = Nokogiri::HTML(response.body)
sort_options = Gitlab::Json.parse(html_rendered.css('[data-items]')[0]['data-items'])
expect(response).to render_template('shared/groups/_dropdown')
expect(sort_options.size).to eq(7)
expect(sort_options[0]['value']).to eq('name_asc')
expect(sort_options[0]['text']).to eq(s_('SortOptions|Name'))
expect(sort_options[1]['value']).to eq('name_desc')
expect(sort_options[1]['text']).to eq(s_('SortOptions|Name, descending'))
expect(sort_options[2]['value']).to eq('created_desc')
expect(sort_options[2]['text']).to eq(s_('SortOptions|Last created'))
expect(sort_options[3]['value']).to eq('created_asc')
expect(sort_options[3]['text']).to eq(s_('SortOptions|Oldest created'))
expect(sort_options[4]['value']).to eq('latest_activity_desc')
expect(sort_options[4]['text']).to eq(_('Updated date'))
expect(sort_options[5]['value']).to eq('latest_activity_asc')
expect(sort_options[5]['text']).to eq(s_('SortOptions|Oldest updated'))
expect(sort_options[6]['value']).to eq('storage_size_desc')
expect(sort_options[6]['text']).to eq(s_('SortOptions|Largest group'))
end
context 'when a sort param is present' do
it 'returns a sorted by name_asc result' do
get :index, params: { sort: 'name_asc' }

View File

@ -0,0 +1,125 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import AdminGroupsFilteredSearchAndSort from '~/admin/groups/components/filtered_search_and_sort.vue';
import FilteredSearchAndSort from '~/groups_projects/components/filtered_search_and_sort.vue';
import {
FILTERED_SEARCH_TERM_KEY,
SORT_DIRECTION_ASC,
SORT_DIRECTION_DESC,
SORT_OPTION_CREATED_DATE,
SORT_OPTION_UPDATED_DATE,
SORT_OPTIONS,
} from '~/admin/groups/constants';
import { visitUrl } from '~/lib/utils/url_utility';
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
visitUrl: jest.fn(),
}));
describe('AdminGroupsFilteredSearchAndSort', () => {
let wrapper;
const createComponent = () => {
wrapper = shallowMountExtended(AdminGroupsFilteredSearchAndSort, {});
};
const findFilteredSearchAndSort = () => wrapper.findComponent(FilteredSearchAndSort);
it('renders FilteredSearchAndSort component with the correct initial props', () => {
createComponent();
expect(findFilteredSearchAndSort().props()).toMatchObject({
filteredSearchNamespace: 'admin-groups',
filteredSearchTokens: [],
filteredSearchTermKey: 'name',
filteredSearchRecentSearchesStorageKey: 'groups',
isAscending: false,
sortOptions: SORT_OPTIONS,
activeSortOption: SORT_OPTION_CREATED_DATE,
filteredSearchQuery: {},
});
});
describe('when the search bar is submitted', () => {
const searchTerm = 'test';
beforeEach(() => {
createComponent();
findFilteredSearchAndSort().vm.$emit('filter', {
[FILTERED_SEARCH_TERM_KEY]: searchTerm,
});
});
it('visits the URL with the correct query string', () => {
expect(visitUrl).toHaveBeenCalledWith(`?${FILTERED_SEARCH_TERM_KEY}=${searchTerm}`);
});
});
describe('when the sort item is changed', () => {
beforeEach(() => {
createComponent();
findFilteredSearchAndSort().vm.$emit('sort-by-change', SORT_OPTION_UPDATED_DATE.value);
});
it('visits the URL with the correct query string', () => {
expect(visitUrl).toHaveBeenCalledWith(
`?sort=${SORT_OPTION_UPDATED_DATE.value}_${SORT_DIRECTION_DESC}`,
);
});
});
describe('when the sort direction is changed', () => {
beforeEach(() => {
createComponent();
findFilteredSearchAndSort().vm.$emit('sort-direction-change', true);
});
it('visits the URL with the correct query string', () => {
expect(visitUrl).toHaveBeenCalledWith(
`?sort=${SORT_OPTION_CREATED_DATE.value}_${SORT_DIRECTION_ASC}`,
);
});
});
describe('when the search term is present and the sort item is changed', () => {
const searchTerm = 'group-name';
beforeEach(() => {
setWindowLocation(`?${FILTERED_SEARCH_TERM_KEY}=${searchTerm}`);
createComponent();
findFilteredSearchAndSort().vm.$emit('sort-direction-change', true);
});
it('visits the URL with the correct query string', () => {
expect(visitUrl).toHaveBeenCalledWith(
`?${FILTERED_SEARCH_TERM_KEY}=${searchTerm}&sort=${SORT_OPTION_CREATED_DATE.value}_${SORT_DIRECTION_ASC}`,
);
});
});
describe('when the sort item is present and the search term is changed', () => {
const searchTerm = 'group-name';
beforeEach(() => {
setWindowLocation(`?sort=${SORT_OPTION_CREATED_DATE.value}_${SORT_DIRECTION_ASC}`);
createComponent();
findFilteredSearchAndSort().vm.$emit('filter', {
[FILTERED_SEARCH_TERM_KEY]: searchTerm,
});
});
it('visits the URL with the correct query string', () => {
expect(visitUrl).toHaveBeenCalledWith(
`?${FILTERED_SEARCH_TERM_KEY}=${searchTerm}&sort=${SORT_OPTION_CREATED_DATE.value}_${SORT_DIRECTION_ASC}`,
);
});
});
});

View File

@ -24,6 +24,7 @@ exports[`Merge request dashboard collapsible section renders section 1`] = `
Approved
<gl-badge-stub
class="gl-ml-1"
data-testid="merge-request-list-count"
iconsize="md"
size="sm"
tag="span"

View File

@ -29,6 +29,12 @@ describe('Merge request dashboard collapsible section', () => {
expect(wrapper.findByTestId('section-content').exists()).toBe(false);
});
it('hides badge when count is null', () => {
createComponent(null);
expect(wrapper.findByTestId('merge-request-list-count').exists()).toBe(false);
});
it('expands collapsed content', async () => {
createComponent(1);

View File

@ -16,6 +16,7 @@ describe('Merge requests query component', () => {
let slotSpy;
let reviewerQueryMock;
let assigneeQueryMock;
let assigneeCountQueryMock;
function createComponent(
props = { query: 'reviewRequestedMergeRequests', variables: { state: 'opened' } },
@ -54,6 +55,9 @@ describe('Merge requests query component', () => {
},
},
});
assigneeCountQueryMock = jest
.fn()
.mockResolvedValue({ data: { currentUser: { id: 1, mergeRequests: { count: 1 } } } });
const apolloProvider = createMockApollo(
[
[reviewerQuery, reviewerQueryMock],
@ -64,12 +68,7 @@ describe('Merge requests query component', () => {
.fn()
.mockResolvedValue({ data: { currentUser: { id: 1, mergeRequests: { count: 1 } } } }),
],
[
assigneeCountQuery,
jest
.fn()
.mockResolvedValue({ data: { currentUser: { id: 1, mergeRequests: { count: 1 } } } }),
],
[assigneeCountQuery, assigneeCountQueryMock],
],
{},
{ typePolicies: { Query: { fields: { currentUser: { merge: false } } } } },
@ -112,6 +111,18 @@ describe('Merge requests query component', () => {
});
});
it('does not call count query if hideCount is true', async () => {
createComponent({
query: 'assignedMergeRequests',
variables: { state: 'opened' },
hideCount: true,
});
await waitForPromises();
expect(assigneeCountQueryMock).not.toHaveBeenCalled();
});
it.each([
['reviewRequestedMergeRequests', 'reviewer'],
['assignedMergeRequests', 'assignee'],

View File

@ -0,0 +1,56 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import TabTitle from '~/merge_request_dashboard/components/tab_title.vue';
import reviewerCountQuery from '~/merge_request_dashboard/queries/reviewer_count.query.graphql';
import assigneeCountQuery from '~/merge_request_dashboard/queries/assignee_count.query.graphql';
Vue.use(VueApollo);
describe('Merge requests tab title component', () => {
let reviewerCountQueryMock;
let assigneeCountQueryMock;
let wrapper;
function createComponent(props = { queries: [] }) {
reviewerCountQueryMock = jest.fn().mockResolvedValue({
data: { currentUser: { id: 1, mergeRequests: { count: 1 } } },
});
assigneeCountQueryMock = jest
.fn()
.mockResolvedValue({ data: { currentUser: { id: 1, mergeRequests: { count: 1 } } } });
const apolloProvider = createMockApollo(
[
[reviewerCountQuery, reviewerCountQueryMock],
[assigneeCountQuery, assigneeCountQueryMock],
],
{},
{ typePolicies: { Query: { fields: { currentUser: { merge: false } } } } },
);
wrapper = shallowMountExtended(TabTitle, {
apolloProvider,
propsData: {
title: 'All',
tabKey: 'all',
...props,
},
});
}
const findTabCount = () => wrapper.findByTestId('tab-count');
it.each`
queries | count
${['reviewRequestedMergeRequests']} | ${'1'}
${['reviewRequestedMergeRequests', 'assignedMergeRequests']} | ${'2'}
`('sets count as $count for queries $queries', async ({ count, queries }) => {
createComponent({ queries: queries.map((query) => ({ query })) });
await waitForPromises();
expect(findTabCount().text()).toBe(count);
});
});

View File

@ -13,6 +13,7 @@ import WorkItemLinksForm from '~/work_items/components/work_item_links/work_item
import WorkItemActionsSplitButton from '~/work_items/components/work_item_links/work_item_actions_split_button.vue';
import WorkItemMoreActions from '~/work_items/components/shared/work_item_more_actions.vue';
import WorkItemRolledUpData from '~/work_items/components/work_item_links/work_item_rolled_up_data.vue';
import WorkItemRolledUpCount from '~/work_items/components/work_item_links/work_item_rolled_up_count.vue';
import getWorkItemTreeQuery from '~/work_items/graphql/work_item_tree.query.graphql';
import namespaceWorkItemTypesQuery from '~/work_items/graphql/namespace_work_item_types.query.graphql';
import {
@ -58,6 +59,7 @@ describe('WorkItemTree', () => {
const findMoreActions = () => wrapper.findComponent(WorkItemMoreActions);
const findCrudComponent = () => wrapper.findComponent(CrudComponent);
const findRolledUpData = () => wrapper.findComponent(WorkItemRolledUpData);
const findRolledUpCount = () => wrapper.findComponent(WorkItemRolledUpCount);
const createComponent = async ({
workItemType = 'Objective',
@ -360,18 +362,25 @@ describe('WorkItemTree', () => {
createComponent({ shouldWaitForPromise: false });
expect(findRolledUpData().exists()).toBe(false);
expect(findRolledUpCount().exists()).toBe(false);
await waitForPromises();
expect(findRolledUpData().exists()).toBe(true);
expect(findRolledUpCount().exists()).toBe(true);
expect(findRolledUpData().props()).toEqual({
workItemId: 'gid://gitlab/WorkItem/2',
workItemIid: '2',
workItemType: 'Objective',
rolledUpCountsByType: mockRolledUpCountsByType,
fullPath: 'test/project',
});
expect(findRolledUpCount().props()).toEqual({
hideCountWhenZero: false,
infoType: 'badge',
rolledUpCountsByType: mockRolledUpCountsByType,
});
});
it('fetches widget definitions and passes formatted allowed children by type to children wrapper', async () => {

View File

@ -142,23 +142,6 @@ RSpec.describe SortingHelper, feature_category: :shared do
end
end
describe '#groups_sort_options_hash' do
let(:expected_options) do
{
sort_value_name => sort_title_name,
sort_value_name_desc => sort_title_name_desc,
sort_value_recently_created => sort_title_recently_created,
sort_value_oldest_created => sort_title_oldest_created,
sort_value_latest_activity => sort_title_recently_updated,
sort_value_oldest_activity => sort_title_oldest_updated
}
end
it 'returns a hash of available sorting options for the groups' do
expect(groups_sort_options_hash).to eq(expected_options)
end
end
describe 'with `projects` controller' do
before do
stub_controller_path 'projects'

View File

@ -1,53 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'shared/groups/_dropdown.html.haml' do
describe 'render' do
describe 'when a sort option is not selected' do
before do
render 'shared/groups/dropdown'
end
it 'renders a default sort option' do
expect(rendered).to have_content 'Last created'
end
it 'renders correct sort by options' do
html_rendered = Nokogiri::HTML(rendered)
sort_options = Gitlab::Json.parse(html_rendered.css('[data-items]')[0]['data-items'])
expect(sort_options.size).to eq(6)
expect(sort_options[0]['value']).to eq('name_asc')
expect(sort_options[0]['text']).to eq(s_('SortOptions|Name'))
expect(sort_options[1]['value']).to eq('name_desc')
expect(sort_options[1]['text']).to eq(s_('SortOptions|Name, descending'))
expect(sort_options[2]['value']).to eq('created_desc')
expect(sort_options[2]['text']).to eq(s_('SortOptions|Last created'))
expect(sort_options[3]['value']).to eq('created_asc')
expect(sort_options[3]['text']).to eq(s_('SortOptions|Oldest created'))
expect(sort_options[4]['value']).to eq('latest_activity_desc')
expect(sort_options[4]['text']).to eq(_('Updated date'))
expect(sort_options[5]['value']).to eq('latest_activity_asc')
expect(sort_options[5]['text']).to eq(s_('SortOptions|Oldest updated'))
end
end
describe 'when a sort option is selected' do
before do
assign(:sort, 'name_desc')
render 'shared/groups/dropdown'
end
it 'renders the selected sort option' do
expect(rendered).to have_content 'Name, descending'
end
end
end
end