Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-11-16 21:08:11 +00:00
parent 8fa0c53e26
commit 19db7fd1fe
84 changed files with 1078 additions and 496 deletions

View File

@ -1,24 +1,4 @@
---
RSpec/TimecopFreeze:
Exclude:
- ee/spec/models/merge_train_spec.rb
- ee/spec/support/shared_contexts/lib/gitlab/insights/reducers/reducers_shared_contexts.rb
- qa/spec/support/repeater_spec.rb
- spec/features/users/active_sessions_spec.rb
- spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
- spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
- spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
- spec/lib/gitlab/auth/unique_ips_limiter_spec.rb
- spec/lib/gitlab/checks/timed_logger_spec.rb
- spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
- spec/lib/gitlab/puma_logging/json_formatter_spec.rb
- spec/lib/json_web_token/hmac_token_spec.rb
- spec/models/active_session_spec.rb
- spec/serializers/entity_date_helper_spec.rb
- spec/support/cycle_analytics_helpers/test_generation.rb
- spec/support/helpers/cycle_analytics_helpers.rb
- spec/support/helpers/javascript_fixtures_helpers.rb
- spec/support/shared_contexts/rack_attack_shared_context.rb
- spec/support/shared_examples/workers/concerns/reenqueuer_shared_examples.rb
- spec/workers/concerns/reenqueuer_spec.rb
- spec/workers/metrics/dashboard/prune_old_annotations_worker_spec.rb

View File

@ -12,8 +12,8 @@ import { TYPE_USER } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import { __ } from '~/locale';
import {
OPERATOR_IS_AND_IS_NOT,
OPERATOR_IS_ONLY,
OPERATORS_IS_NOT,
OPERATORS_IS,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
TOKEN_TITLE_CONFIDENTIAL,
@ -71,7 +71,7 @@ export default {
icon: 'user',
title: TOKEN_TITLE_ASSIGNEE,
type: TOKEN_TYPE_ASSIGNEE,
operators: OPERATOR_IS_AND_IS_NOT,
operators: OPERATORS_IS_NOT,
token: AuthorToken,
unique: true,
fetchAuthors,
@ -81,7 +81,7 @@ export default {
icon: 'pencil',
title: TOKEN_TITLE_AUTHOR,
type: TOKEN_TYPE_AUTHOR,
operators: OPERATOR_IS_AND_IS_NOT,
operators: OPERATORS_IS_NOT,
symbol: '@',
token: AuthorToken,
unique: true,
@ -92,7 +92,7 @@ export default {
icon: 'labels',
title: TOKEN_TITLE_LABEL,
type: TOKEN_TYPE_LABEL,
operators: OPERATOR_IS_AND_IS_NOT,
operators: OPERATORS_IS_NOT,
token: LabelToken,
unique: false,
symbol: '~',
@ -128,7 +128,7 @@ export default {
title: TOKEN_TITLE_CONFIDENTIAL,
unique: true,
token: GlFilteredSearchToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
options: [
{ icon: 'eye-slash', value: 'yes', title: __('Yes') },
{ icon: 'eye', value: 'no', title: __('No') },

View File

@ -1,5 +1,5 @@
import { __ } from '~/locale';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
import { PARAM_KEY_PAUSED, I18N_PAUSED } from '../../constants';
@ -24,5 +24,5 @@ export const pausedTokenConfig = {
// see: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1438
title: title.replace(/\s/g, '\u00a0'),
})),
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
};

View File

@ -1,5 +1,5 @@
import {
OPERATOR_IS_ONLY,
OPERATORS_IS,
TOKEN_TITLE_STATUS,
} from '~/vue_shared/components/filtered_search_bar/constants';
import BaseToken from '~/vue_shared/components/filtered_search_bar/tokens/base_token.vue';
@ -38,5 +38,5 @@ export const statusTokenConfig = {
// see: https://gitlab.com/gitlab-org/gitlab-ui/-/issues/1438
title: title.replace(/\s/g, '\u00a0'),
})),
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
};

View File

@ -1,5 +1,5 @@
import { s__ } from '~/locale';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { PARAM_KEY_TAG } from '../../constants';
import TagToken from './tag_token.vue';
@ -8,5 +8,5 @@ export const tagTokenConfig = {
title: s__('Runners|Tags'),
type: PARAM_KEY_TAG,
token: TagToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
};

View File

@ -1,7 +1,7 @@
<script>
import { mapActions, mapState } from 'vuex';
import {
OPERATOR_IS_ONLY,
OPERATORS_IS,
DEFAULT_NONE_ANY,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
@ -52,7 +52,7 @@ export default {
initialMilestones: this.milestonesData,
unique: true,
symbol: '%',
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchMilestones: this.fetchMilestones,
},
{
@ -64,7 +64,7 @@ export default {
initialLabels: this.labelsData,
unique: false,
symbol: '~',
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchLabels: this.fetchLabels,
},
{
@ -74,7 +74,7 @@ export default {
token: AuthorToken,
initialAuthors: this.authorsData,
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchAuthors: this.fetchAuthors,
},
{
@ -84,7 +84,7 @@ export default {
token: AuthorToken,
initialAuthors: this.assigneesData,
unique: false,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchAuthors: this.fetchAssignees,
},
];

View File

@ -28,7 +28,7 @@ import { helpPagePath } from '~/helpers/help_page_helper';
import {
DEFAULT_NONE_ANY,
FILTERED_SEARCH_TERM,
OPERATOR_IS_ONLY,
OPERATORS_IS,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
TOKEN_TITLE_CONFIDENTIAL,
@ -39,8 +39,8 @@ import {
TOKEN_TITLE_ORGANIZATION,
TOKEN_TITLE_RELEASE,
TOKEN_TITLE_TYPE,
OPERATOR_IS_NOT_OR,
OPERATOR_IS_AND_IS_NOT,
OPERATORS_IS_NOT_OR,
OPERATORS_IS_NOT,
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
TOKEN_TYPE_CONFIDENTIAL,
@ -318,7 +318,7 @@ export default {
token: AuthorToken,
dataType: 'user',
defaultAuthors: DEFAULT_NONE_ANY,
operators: this.hasOrFeature ? OPERATOR_IS_NOT_OR : OPERATOR_IS_AND_IS_NOT,
operators: this.hasOrFeature ? OPERATORS_IS_NOT_OR : OPERATORS_IS_NOT,
fetchAuthors: this.fetchUsers,
recentSuggestionsStorageKey: `${this.fullPath}-issues-recent-tokens-assignee`,
preloadedAuthors,
@ -378,7 +378,7 @@ export default {
icon: 'eye-slash',
token: GlFilteredSearchToken,
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
options: [
{ icon: 'eye-slash', value: 'yes', title: this.$options.i18n.confidentialYes },
{ icon: 'eye', value: 'no', title: this.$options.i18n.confidentialNo },
@ -396,7 +396,7 @@ export default {
isProject: this.isProject,
defaultContacts: DEFAULT_NONE_ANY,
recentSuggestionsStorageKey: `${this.fullPath}-issues-recent-tokens-crm-contacts`,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
unique: true,
});
}
@ -411,7 +411,7 @@ export default {
isProject: this.isProject,
defaultOrganizations: DEFAULT_NONE_ANY,
recentSuggestionsStorageKey: `${this.fullPath}-issues-recent-tokens-crm-organizations`,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
unique: true,
});
}

View File

@ -6,7 +6,7 @@ import {
FILTER_STARTED,
FILTER_UPCOMING,
OPERATOR_IS,
OPERATOR_IS_NOT,
OPERATOR_NOT,
OPERATOR_OR,
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
@ -174,7 +174,7 @@ export const filters = {
[OPERATOR_IS]: {
[NORMAL_FILTER]: 'author_username',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[author_username]',
},
},
@ -190,7 +190,7 @@ export const filters = {
[SPECIAL_FILTER]: 'assignee_id',
[ALTERNATIVE_FILTER]: 'assignee_username',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[assignee_username][]',
},
[OPERATOR_OR]: {
@ -208,7 +208,7 @@ export const filters = {
[NORMAL_FILTER]: 'milestone_title',
[SPECIAL_FILTER]: 'milestone_title',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[milestone_title]',
[SPECIAL_FILTER]: 'not[milestone_title]',
},
@ -225,7 +225,7 @@ export const filters = {
[SPECIAL_FILTER]: 'label_name[]',
[ALTERNATIVE_FILTER]: 'label_name',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[label_name][]',
},
},
@ -238,7 +238,7 @@ export const filters = {
[OPERATOR_IS]: {
[NORMAL_FILTER]: 'type[]',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[type][]',
},
},
@ -253,7 +253,7 @@ export const filters = {
[NORMAL_FILTER]: 'release_tag',
[SPECIAL_FILTER]: 'release_tag',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[release_tag]',
},
},
@ -268,7 +268,7 @@ export const filters = {
[NORMAL_FILTER]: 'my_reaction_emoji',
[SPECIAL_FILTER]: 'my_reaction_emoji',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[my_reaction_emoji]',
},
},
@ -293,7 +293,7 @@ export const filters = {
[NORMAL_FILTER]: 'iteration_id',
[SPECIAL_FILTER]: 'iteration_id',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[iteration_id]',
[SPECIAL_FILTER]: 'not[iteration_id]',
},
@ -309,7 +309,7 @@ export const filters = {
[NORMAL_FILTER]: 'epic_id',
[SPECIAL_FILTER]: 'epic_id',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[epic_id]',
},
},
@ -324,7 +324,7 @@ export const filters = {
[NORMAL_FILTER]: 'weight',
[SPECIAL_FILTER]: 'weight',
},
[OPERATOR_IS_NOT]: {
[OPERATOR_NOT]: {
[NORMAL_FILTER]: 'not[weight]',
},
},

View File

@ -4,7 +4,7 @@ import { getParameterByName } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
import {
FILTERED_SEARCH_TERM,
OPERATOR_IS_NOT,
OPERATOR_NOT,
OPERATOR_OR,
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_CONFIDENTIAL,
@ -261,7 +261,7 @@ export const convertToApiParams = (filterTokens) => {
const filterType = getFilterType(token.value.data, token.type);
const field = filters[token.type][API_PARAM][filterType];
let obj;
if (token.value.operator === OPERATOR_IS_NOT) {
if (token.value.operator === OPERATOR_NOT) {
obj = not;
} else if (token.value.operator === OPERATOR_OR) {
obj = or;

View File

@ -1,7 +1,7 @@
<script>
import { GlFilteredSearch } from '@gitlab/ui';
import { s__ } from '~/locale';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import JobStatusToken from './tokens/job_status_token.vue';
export default {
@ -27,7 +27,7 @@ export default {
title: s__('Jobs|Status'),
unique: true,
token: JobStatusToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
},
];
},

View File

@ -1,7 +1,7 @@
import { GlFilteredSearchToken } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
// Overridden in EE
export const EE_APP_OPTIONS = {};
@ -117,7 +117,7 @@ export const FILTERED_SEARCH_TOKEN_TWO_FACTOR = {
title: s__('Members|2FA'),
token: GlFilteredSearchToken,
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
options: [
{ value: 'enabled', title: s__('Members|Enabled') },
{ value: 'disabled', title: s__('Members|Disabled') },
@ -131,7 +131,7 @@ export const FILTERED_SEARCH_TOKEN_WITH_INHERITED_PERMISSIONS = {
title: s__('Members|Membership'),
token: GlFilteredSearchToken,
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
options: [
{ value: 'exclude', title: s__('Members|Direct') },
{ value: 'only', title: s__('Members|Inherited') },

View File

@ -8,7 +8,7 @@ import {
TOKEN_TYPE_TAG_NAME,
TAG_LABEL,
} from '~/packages_and_registries/harbor_registry/constants/index';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { createAlert } from '~/flash';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import TagsLoader from '~/packages_and_registries/shared/components/tags_loader.vue';
@ -39,7 +39,7 @@ export default {
title: TAG_LABEL,
unique: true,
token: GlFilteredSearchToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
},
],
data() {

View File

@ -1,7 +1,7 @@
<script>
import { s__ } from '~/locale';
import { sortableFields } from '~/packages_and_registries/package_registry/utils';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
import UrlSync from '~/vue_shared/components/url_sync.vue';
import { getQueryParams, extractFilterAndSorting } from '~/packages_and_registries/shared/utils';
@ -21,7 +21,7 @@ export default {
title: s__('PackageRegistry|Type'),
unique: true,
token: PackageTypeToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
},
],
components: { RegistrySearch, UrlSync, LocalStorageSync },

View File

@ -3,7 +3,7 @@ import { GlFilteredSearch } from '@gitlab/ui';
import { map } from 'lodash';
import { s__ } from '~/locale';
import Tracking from '~/tracking';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { TRACKING_CATEGORIES } from '../../constants';
import PipelineBranchNameToken from './tokens/pipeline_branch_name_token.vue';
import PipelineSourceToken from './tokens/pipeline_source_token.vue';
@ -54,7 +54,7 @@ export default {
title: s__('Pipeline|Trigger author'),
unique: true,
token: PipelineTriggerAuthorToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
projectId: this.projectId,
},
{
@ -63,7 +63,7 @@ export default {
title: s__('Pipeline|Branch name'),
unique: true,
token: PipelineBranchNameToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
projectId: this.projectId,
defaultBranchName: this.defaultBranchName,
disabled: this.selectedTypes.includes(this.$options.tagType),
@ -74,7 +74,7 @@ export default {
title: s__('Pipeline|Tag name'),
unique: true,
token: PipelineTagNameToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
projectId: this.projectId,
disabled: this.selectedTypes.includes(this.$options.branchType),
},
@ -84,7 +84,7 @@ export default {
title: s__('Pipeline|Status'),
unique: true,
token: PipelineStatusToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
},
{
type: this.$options.sourceType,
@ -92,7 +92,7 @@ export default {
title: s__('Pipeline|Source'),
unique: true,
token: PipelineSourceToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
},
];
},

View File

@ -1,7 +1,17 @@
<script>
import api from '~/api';
import showGlobalToast from '~/vue_shared/plugins/global_toast';
import MrWidgetAuthorTime from '../mr_widget_author_time.vue';
import StateContainer from '../state_container.vue';
import {
MR_WIDGET_CLOSED_REOPEN,
MR_WIDGET_CLOSED_REOPENING,
MR_WIDGET_CLOSED_RELOADING,
MR_WIDGET_CLOSED_REOPEN_FAILURE,
} from '../../i18n';
export default {
name: 'MRWidgetClosed',
components: {
@ -14,10 +24,62 @@ export default {
required: true,
},
},
data() {
return {
isPending: false,
isReloading: false,
};
},
computed: {
reopenText() {
let text = MR_WIDGET_CLOSED_REOPEN;
if (this.isPending) {
text = MR_WIDGET_CLOSED_REOPENING;
} else if (this.isReloading) {
text = MR_WIDGET_CLOSED_RELOADING;
}
return text;
},
actions() {
if (!window.gon?.current_user_id) {
return [];
}
return [
{
text: this.reopenText,
loading: this.isPending || this.isReloading,
onClick: this.reopen,
testId: 'extension-actions-reopen-button',
},
];
},
},
methods: {
reopen() {
this.isPending = true;
api
.updateMergeRequest(this.mr.targetProjectId, this.mr.iid, { state_event: 'reopen' })
.then(() => {
this.isReloading = true;
window.location.reload();
})
.catch(() => {
showGlobalToast(MR_WIDGET_CLOSED_REOPEN_FAILURE);
})
.finally(() => {
this.isPending = false;
});
},
},
};
</script>
<template>
<state-container :mr="mr" status="closed">
<state-container :mr="mr" status="closed" :actions="actions">
<mr-widget-author-time
:action-text="s__('mrWidget|Closed by')"
:author="mr.metrics.closedBy"

View File

@ -25,3 +25,10 @@ export const MERGE_TRAIN_BUTTON_TEXT = {
failed: __('Start merge train...'),
passed: __('Start merge train'),
};
export const MR_WIDGET_CLOSED_REOPEN = __('Reopen');
export const MR_WIDGET_CLOSED_REOPENING = __('Reopening...');
export const MR_WIDGET_CLOSED_RELOADING = __('Refreshing...');
export const MR_WIDGET_CLOSED_REOPEN_FAILURE = __(
'An error occurred. Unable to reopen this merge request.',
);

View File

@ -12,20 +12,16 @@ export const FILTER_NONE_ANY = [FILTER_NONE, FILTER_ANY];
export const OPERATOR_IS = '=';
export const OPERATOR_IS_TEXT = __('is');
export const OPERATOR_IS_NOT = '!=';
export const OPERATOR_IS_NOT_TEXT = __('is not one of');
export const OPERATOR_NOT = '!=';
export const OPERATOR_NOT_TEXT = __('is not one of');
export const OPERATOR_OR = '||';
export const OPERATOR_OR_TEXT = __('is one of');
export const OPERATOR_IS_ONLY = [{ value: OPERATOR_IS, description: OPERATOR_IS_TEXT }];
export const OPERATOR_IS_NOT_ONLY = [{ value: OPERATOR_IS_NOT, description: OPERATOR_IS_NOT_TEXT }];
export const OPERATOR_OR_ONLY = [{ value: OPERATOR_OR, description: OPERATOR_OR_TEXT }];
export const OPERATOR_IS_AND_IS_NOT = [...OPERATOR_IS_ONLY, ...OPERATOR_IS_NOT_ONLY];
export const OPERATOR_IS_NOT_OR = [
...OPERATOR_IS_ONLY,
...OPERATOR_IS_NOT_ONLY,
...OPERATOR_OR_ONLY,
];
export const OPERATORS_IS = [{ value: OPERATOR_IS, description: OPERATOR_IS_TEXT }];
export const OPERATORS_NOT = [{ value: OPERATOR_NOT, description: OPERATOR_NOT_TEXT }];
export const OPERATORS_OR = [{ value: OPERATOR_OR, description: OPERATOR_OR_TEXT }];
export const OPERATORS_IS_NOT = [...OPERATORS_IS, ...OPERATORS_NOT];
export const OPERATORS_IS_NOT_OR = [...OPERATORS_IS, ...OPERATORS_NOT, ...OPERATORS_OR];
export const DEFAULT_LABEL_NONE = { value: FILTER_NONE, text: __('None'), title: __('None') };
export const DEFAULT_LABEL_ANY = { value: FILTER_ANY, text: __('Any'), title: __('Any') };

View File

@ -9,7 +9,7 @@ import {
} from '@gitlab/ui';
import { debounce } from 'lodash';
import { DEBOUNCE_DELAY, FILTER_NONE_ANY, OPERATOR_IS_NOT } from '../constants';
import { DEBOUNCE_DELAY, FILTER_NONE_ANY, OPERATOR_NOT } from '../constants';
import {
getRecentlyUsedSuggestions,
setTokenValueToRecentlyUsed,
@ -100,7 +100,7 @@ export default {
return this.getActiveTokenValue(this.suggestions, this.value.data);
},
availableDefaultSuggestions() {
if (this.value.operator === OPERATOR_IS_NOT) {
if (this.value.operator === OPERATOR_NOT) {
return this.defaultSuggestions.filter(
(suggestion) => !FILTER_NONE_ANY.includes(suggestion.value),
);

View File

@ -11,7 +11,7 @@ import Api from '~/api';
import { updateHistory, setUrlParams } from '~/lib/utils/url_utility';
import Tracking from '~/tracking';
import {
OPERATOR_IS_ONLY,
OPERATORS_IS,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
} from '~/vue_shared/components/filtered_search_bar/constants';
@ -119,7 +119,7 @@ export default {
unique: true,
symbol: '@',
token: AuthorToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchPath: this.projectPath,
fetchAuthors: Api.projectUsers.bind(Api),
},
@ -130,7 +130,7 @@ export default {
unique: true,
symbol: '@',
token: AuthorToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchPath: this.projectPath,
fetchAuthors: Api.projectUsers.bind(Api),
},

View File

@ -2736,11 +2736,6 @@ class Project < ApplicationRecord
ci_config_path.blank? || ci_config_path == Gitlab::FileDetector::PATTERNS[:gitlab_ci]
end
# DO NOT USE. This method will be deprecated soon
def uses_external_project_ci_config?
!!(ci_config_path =~ %r{@.+/.+})
end
def limited_protected_branches(limit)
protected_branches.limit(limit)
end
@ -2861,11 +2856,6 @@ class Project < ApplicationRecord
repository.gitlab_ci_yml_for(sha, ci_config_path_or_default)
end
# DO NOT USE. This method will be deprecated soon
def ci_config_external_project
Project.find_by_full_path(ci_config_path.split('@', 2).last)
end
def enabled_group_deploy_keys
return GroupDeployKey.none unless group

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/377679
milestone: '15.6'
type: development
group: group::integrations
default_enabled: false
default_enabled: true

View File

@ -332,6 +332,28 @@ To check an installed Git LFS client's version, run this command:
git lfs version
```
## Error viewing a PDF file
When LFS has been configured with object storage and `proxy_download` set to
`false`, [you may see an error when previewing a PDF file from the Web browser](https://gitlab.com/gitlab-org/gitlab/-/issues/248100):
```plaintext
An error occurred while loading the file. Please try again later.
```
This occurs due to Cross-Origin Resource Sharing (CORS) restrictions:
the browser attempts to load the PDF from object storage, but the object
storage provider rejects the request since the GitLab domain differs
from the object storage domain.
To fix this issue, configure your object storage provider's CORS
settings to allow the GitLab domain. See the following documentation
for more details:
1. [AWS S3](https://aws.amazon.com/premiumsupport/knowledge-center/s3-configure-cors/)
1. [Google Cloud Storage](https://cloud.google.com/storage/docs/configuring-cors)
1. [Azure Storage](https://learn.microsoft.com/en-us/rest/api/storageservices/cross-origin-resource-sharing--cors--support-for-the-azure-storage-services).
## Known limitations
- Only compatible with the Git LFS client versions 1.1.0 and later, or 1.0.2.

View File

@ -638,6 +638,16 @@ if this access is not in place include:
Received status code 403 from server: Forbidden
```
- Object storage buckets need to allow Cross-Origin Resource Sharing
(CORS) access from the URL of the GitLab instance. Attempting to load
a PDF in the repository page may show the following error:
```plaintext
An error occurred while loading the file. Please try again later.
```
See [the LFS documentation](lfs/index.md#error-viewing-a-pdf-file) for more details.
Getting a `403 Forbidden` response is specifically called out on the
[package repository documentation](packages/index.md#using-object-storage)
as a side effect of how some build tools work.

View File

@ -1081,6 +1081,190 @@ Reverting the PostgreSQL upgrade with `gitlab-ctl revert-pg-upgrade` has the sam
`gitlab-ctl pg-upgrade`. You should follow the same procedure by first stopping the replicas,
then reverting the leader, and finally reverting the replicas.
### Near zero downtime upgrade of PostgreSQL in a Patroni cluster (Experimental)
Patroni enables you to run a major PostgreSQL upgrade without shutting down the cluster. However, this
requires additional resources to host the new Patroni nodes with the upgraded PostgreSQL. In practice, with this
procedure, you are:
- Creating a new Patroni cluster with a new version of PostgreSQL.
- Migrating the data from the existing cluster.
This procedure is non-invasive, and does not impact your existing cluster before switching it off.
However, it can be both time- and resource-consuming. Consider their trade-offs with availability.
The steps, in order:
1. [Provision resources for the new cluster](#provision-resources-for-the-new-cluster).
1. [Preflight check](#preflight-check).
1. [Configure the leader of the new cluster](#configure-the-leader-of-the-new-cluster).
1. [Start publisher on the existing leader](#start-publisher-on-the-existing-leader).
1. [Copy the data from the existing cluster](#copy-the-data-from-the-existing-cluster).
1. [Replicate data from the existing cluster](#replicate-data-from-the-existing-cluster).
1. [Grow the new cluster](#grow-the-new-cluster).
1. [Switch the application to use the new cluster](#switch-the-application-to-use-the-new-cluster).
1. [Clean up](#clean-up).
#### Provision resources for the new cluster
You need a new set of resources for Patroni nodes. The new Patroni cluster does not require exactly the same number
of nodes as the existing cluster. You may choose a different number of nodes based on your requirements. The new
cluster uses the existing Consul cluster (with a different `patroni['scope']`) and PgBouncer nodes.
Make sure that at least the leader node of the existing cluster is accessible from the nodes of the new
cluster.
#### Preflight check
We rely on PostgreSQL [logical replication](https://www.postgresql.org/docs/current/logical-replication.html)
to support near-zero downtime upgrades of Patroni clusters. The of
[logical replication requirements](https://www.postgresql.org/docs/current/logical-replication-restrictions.html)
must be met. In particular, `wal_level` must be `logical`. To check the `wal_level`,
run the following command with `gitlab-psql` on any node of the existing cluster:
```sql
SHOW wal_level;
```
By default, Patroni sets `wal_level` to `replica`. You must increase it to `logical`.
Changing `wal_level` requires restarting PostgreSQL, so this step leads to a short
downtime (hence near-zero downtime). To do this on the Patroni **leader** node:
1. Edit `gitlab.rb` by setting:
```ruby
patroni['postgresql']['wal_level'] = 'logical'
```
1. Run `gitlab-ctl reconfigure`. This writes the configuration but does not restart PostgreSQL service.
1. Run `gitlab-ctl patroni restart` to restart PostgreSQL and apply the new `wal_level` without triggering
failover. For the duration of restart cycle, the cluster leader is unavailable.
1. Verify the change by running `SHOW wal_level` with `gitlab-psql`.
#### Configure the leader of the new cluster
Configure the first node of the new cluster. It becomes the leader of the new cluster.
You can use the configuration of the existing cluster, if it is compatible with the new
PostgreSQL version. Refer to the documentation on [configuring Patroni clusters](#configuring-patroni-cluster).
In addition to the common configuration, you must apply the following in `gitlab.rb` to:
1. Make sure that the new Patroni cluster uses a different scope. The scope is used to namespace the Patroni settings
in Consul, making it possible to use the same Consul cluster for the existing and the new clusters.
```ruby
patroni['scope'] = 'postgresql_new-ha'
```
1. Make sure that Consul agents don't mix PostgreSQL services offered by the existing and the new Patroni
clusters. For this purpose, you must use an internal attribute that is currently undocumented:
```ruby
consul['internal']['postgresql_service_name'] = 'postgresql_new'
```
#### Start publisher on the existing leader
On the existing leader, run this SQL statement with `gitlab-psql` to start a logical replication publisher:
```sql
CREATE PUBLICATION patroni_upgrade FOR ALL TABLES;
```
#### Copy the data from the existing cluster
To dump the current database from the existing cluster, run these commands on the
**leader** of the new cluster:
1. Optional. Copy global database objects:
```shell
pg_dumpall -h ${EXISTING_CLUSTER_LEADER} -U gitlab-psql -g | gitlab-psql
```
You can ignore the errors about existing database objects, such as roles. They are
created when the node is configured for the first time.
1. Copy the current database:
```shell
pg_dump -h ${EXISTING_CLUSTER_LEADER} -U gitlab-psql -d gitlabhq_production -s | gitlab-psql
```
Depending on the size of your database, this command may take a while to complete.
The `pg_dump` and `pg_dumpall` commands are in `/opt/gitlab/embedded/bin`. In these commands,
`EXISTING_CLUSTER_LEADER` is the host address of the leader node of the existing cluster.
NOTE:
The `gitlab-psql` user must be able to authenticate the existing leader from the new leader node.
#### Replicate data from the existing cluster
After taking the initial data dump, you must keep the new leader in sync with the
latest changes of your existing cluster. On the new leader, run this SQL statement
with `gitlab-psql` to subscribe to publication of the existing leader:
```sql
CREATE SUBSCRIPTION patroni_upgrade
CONNECTION 'host=EXISTING_CLUSTER_LEADER dbname=gitlabhq_production user=gitlab-psql'
PUBLICATION patroni_upgrade;
```
In this statement, `EXISTING_CLUSTER_LEADER` is the host address of the leader node
of the existing cluster. You can also use
[other parameters](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS)
to change the connection string. For example, you can pass the authentication password.
To check the status of replication, run these queries:
- `SELECT * FROM pg_replication_slots WHERE slot_name = 'patroni_upgrade'` on the existing leader (the publisher).
- `SELECT * FROM pg_stat_subscription` on the new leader (the subscriber).
#### Grow the new cluster
Configure other nodes of the new cluster in the way you
[configured the leader](#configure-the-leader-of-the-new-cluster).
Make sure that you use the same `patroni['scope']` and
`consul['internal']['postgresql_service_name']`.
What happens here:
- The application still uses the existing leader as its database backend.
- The logical replication ensures that the new leader keeps in sync.
- When other nodes are added to the new cluster, Patroni handles
the replication to the these nodes.
It is a good idea to wait until the replica nodes of the new cluster are initialized and caught up on the replication
lag.
#### Switch the application to use the new cluster
Up to this point, you can stop the upgrade procedure without losing data on the
existing cluster. When you switch the database backend of the application and point
it to the new cluster, the old cluster does not receive new updates. It falls behind
the new cluster. After this point, any recovery must be done from the nodes of the new cluster.
To do the switch on **all** PgBouncer nodes:
1. Edit `gitlab.rb` by setting:
```ruby
consul['watchers'] = %w(postgresql_new)
consul['internal']['postgresql_service_name'] = 'postgresql_new'
```
1. Run `gitlab-ctl reconfigure`.
1. You must also run `rm /var/opt/gitlab/consul/watcher_postgresql.json`.
This is a [known issue](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/7293).
#### Clean up
After completing these steps, then you can clean up the resources of the old Patroni cluster.
They are no longer needed. However, before removing the resources, remove the
logical replication subscription on the new leader by running `DROP SUBSCRIPTION patroni_upgrade`
with `gitlab-psql`.
## Troubleshooting
### Consul and PostgreSQL changes not taking effect

View File

@ -154,3 +154,69 @@ Example response:
}
]
```
## Get the status of a merge request on a merge train
Get merge train information for the requested merge request.
```plaintext
GET /projects/:id/merge_trains/merge_requests/:merge_request_iid
```
Supported attributes:
| Attribute | Type | Required | Description |
| ------------------- | -------------- | -------- | ------------------------------------------------------------------------------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](index.md#namespaced-path-encoding). |
| `merge_request_iid` | integer | yes | The internal ID of the merge request. |
Example request:
```shell
curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/597/merge_trains/merge_requests/1"
```
Example response:
```json
{
"id": 267,
"merge_request": {
"id": 273,
"iid": 1,
"project_id": 597,
"title": "My title 9",
"description": null,
"state": "opened",
"created_at": "2022-10-31T19:06:05.725Z",
"updated_at": "2022-10-31T19:06:05.725Z",
"web_url": "http://localhost/namespace18/project21/-/merge_requests/1"
},
"user": {
"id": 933,
"username": "user12",
"name": "Sidney Jones31",
"state": "active",
"avatar_url": "https://www.gravatar.com/avatar/6c8365de387cb3db10ecc7b1880203c4?s=80\u0026d=identicon",
"web_url": "http://localhost/user12"
},
"pipeline": {
"id": 273,
"iid": 1,
"project_id": 598,
"sha": "b83d6e391c22777fca1ed3012fce84f633d7fed0",
"ref": "main",
"status": "pending",
"source": "push",
"created_at": "2022-10-31T19:06:06.231Z",
"updated_at": "2022-10-31T19:06:06.231Z",
"web_url": "http://localhost/namespace19/project22/-/pipelines/273"
},
"created_at": "2022-10-31T19:06:06.237Z",
"updated_at":"2022-10-31T19:06:06.237Z",
"target_branch":"main",
"status":"idle",
"merged_at":null,
"duration":null
}
```

View File

@ -104,7 +104,9 @@ Example response:
"floc_enabled": false,
"external_pipeline_validation_service_timeout": null,
"external_pipeline_validation_service_token": null,
"external_pipeline_validation_service_url": null
"external_pipeline_validation_service_url": null,
"jira_connect_application_key": null,
"jira_connect_proxy_url": null
}
```
@ -218,7 +220,9 @@ Example response:
"external_pipeline_validation_service_timeout": null,
"external_pipeline_validation_service_token": null,
"external_pipeline_validation_service_url": null,
"can_create_group": false
"can_create_group": false,
"jira_connect_application_key": "123",
"jira_connect_proxy_url": "http://gitlab.example.com"
}
```
@ -505,6 +509,8 @@ listed in the descriptions of the relevant settings.
| `whats_new_variant` | string | no | What's new variant, possible values: `all_tiers`, `current_tier`, and `disabled`. |
| `web_ide_clientside_preview_enabled` | boolean | no | Live Preview (allow live previews of JavaScript projects in the Web IDE using CodeSandbox Live Preview). |
| `wiki_page_max_content_bytes` | integer | no | Maximum wiki page content size in **bytes**. Default: 52428800 Bytes (50 MB). The minimum value is 1024 bytes. |
| `jira_connect_application_key` | string | no | Application ID of the OAuth application that should be used to authenticate with the GitLab.com for Jira Cloud app. |
| `jira_connect_proxy_url` | string | no | URL of the GitLab instance that should be used as a proxy for the GitLab.com for Jira Cloud app. |
### Package Registry: Package file size limits

View File

@ -159,26 +159,17 @@ You can use a Rake task to update the `CODEOWNERS` file.
To update the `CODEOWNERS` file:
1. Open a merge request to update
[the Rake task](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/tasks/gitlab/tw/codeowners.rake)
with the latest [TW team assignments](https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments).
1. Assign the merge request to a backend maintainer for review and merge.
1. After the MR is merged, go to the root of the `gitlab` repository.
1. Run the Rake task and save the output in a file:
```shell
bundle exec rake tw:codeowners > ~/Desktop/updates.md
```
1. Open the file (for example, `~/Desktop/updates.md`) and copy everything
except the errors at the bottom of the file.
1. Open the [`CODEOWNERS`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/CODEOWNERS)
file and paste the lines into the `^[Documentation Pages]` section.
WARNING:
The documentation section is not the last section of the `CODEOWNERS` file. Don't
delete data that isn't ours!
1. Review the [TW team assignments](https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments)
in the [`codeowners.rake`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/tasks/gitlab/tw/codeowners.rake)
file. If any assignments have changed:
1. Update the `codeowners.rake` file with the changes.
1. Assign the merge request to a technical writing manager for review and merge.
1. After the changes to `codeowners.rake` are merged, go to the root of the `gitlab` repository.
1. Run the Rake task with this command: `bundle exec rake tw:codeowners`
1. Review the command output for any pages that need attention to
their metadata. Handle any needed changes in a separate merge request.
1. Add the changes to the CODEOWNERS file to Git: `git add .gitlab/CODEOWNERS`
1. Commit your changes to your branch, and push your branch up to `origin`.
1. Create a merge request and assign it to a technical writing manager for review.
## Move, rename, or delete a page

View File

@ -1064,6 +1064,21 @@ Instead of **and/or**, use **or** or re-write the sentence. This rule also appli
Do not use **slave**. Another option is **secondary**. ([Vale](../testing.md#vale) rule: [`InclusionCultural.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/InclusionCultural.yml))
## subscription tier
Do not confuse **subscription** or **subscription tier** with **[license](#license)**.
A user purchases a **subscription**. That subscription has a **tier**.
To describe tiers:
| Instead of | Use |
|---------------------------------|----------------------------------------|
| In the Free tier or greater | In any tier |
| In the Free tier or higher | In any tier |
| In the Premium tier or greater | In the Premium or Ultimate tier |
| In the Premium tier or higher | In the Premium or Ultimate tier |
| In the Premium tier or lower | In the Free or Premium tier |
## subgroup
Use **subgroup** (no hyphen) instead of **sub-group**. ([Vale](../testing.md#vale) rule: [`SubstitutionSuggestions.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc/.vale/gitlab/SubstitutionSuggestions.yml))

View File

@ -17,7 +17,7 @@ pre-push:
tags: view haml style
files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD
glob: '*.html.haml'
run: REVEAL_RUBOCOP_TODO=0 bundle exec haml-lint --config .haml-lint.yml {files}
run: REVEAL_RUBOCOP_TODO=0 bundle exec haml-lint --parallel --config .haml-lint.yml {files}
markdownlint:
tags: documentation style
files: git diff --name-only --diff-filter=d $(git merge-base origin/master HEAD)..HEAD

View File

@ -184,6 +184,8 @@ module API
optional :group_runner_token_expiration_interval, type: Integer, desc: 'Token expiration interval for group runners, in seconds'
optional :project_runner_token_expiration_interval, type: Integer, desc: 'Token expiration interval for project runners, in seconds'
optional :pipeline_limit_per_project_user_sha, type: Integer, desc: "Maximum number of pipeline creation requests allowed per minute per user and commit. Set to 0 for unlimited requests per minute."
optional :jira_connect_application_key, type: String, desc: "Application ID of the OAuth application that should be used to authenticate with the GitLab.com for Jira Cloud app"
optional :jira_connect_proxy_url, type: String, desc: "URL of the GitLab instance that should be used as a proxy for the GitLab.com for Jira Cloud app"
Gitlab::SSHPublicKey.supported_types.each do |type|
optional :"#{type}_key_restriction",

View File

@ -9,7 +9,7 @@ module Banzai
module Filter
# HTML Filter to highlight fenced code blocks
#
class SyntaxHighlightFilter < HTML::Pipeline::Filter
class SyntaxHighlightFilter < TimeoutHtmlPipelineFilter
include OutputSafety
LANG_PARAMS_DELIMITER = ':'
@ -19,7 +19,7 @@ module Banzai
CSS = 'pre:not([data-kroki-style]) > code:only-child'
XPATH = Gitlab::Utils::Nokogiri.css_to_xpath(CSS).freeze
def call
def call_with_timeout
doc.xpath(XPATH).each do |node|
highlight_node(node)
end

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true
module Banzai
  module Filter
    # Base class for HTML filters whose work is wrapped in a
    # Gitlab::RenderTimeout. When the timeout fires, the partially
    # processed document is returned instead of killing the entire
    # rendering pipeline.
    #
    # Do not use this for filters that must be allowed to run to
    # completion, such as a `ReferenceRedactorFilter`.
    #
    class TimeoutHtmlPipelineFilter < HTML::Pipeline::Filter
      # Maximum foreground time allowed for a single filter run.
      RENDER_TIMEOUT = 2.seconds

      # Runs `call_with_timeout`, guarded by the render timeout when the
      # :markup_rendering_timeout feature flag is enabled for the project.
      def call
        unless Feature.enabled?(:markup_rendering_timeout, context[:project])
          return call_with_timeout
        end

        Gitlab::RenderTimeout.timeout(foreground: RENDER_TIMEOUT) { call_with_timeout }
      rescue Timeout::Error => e
        filter_name = self.class.name.demodulize
        timeout_counter.increment(source: filter_name)
        Gitlab::ErrorTracking.track_exception(e, project_id: context[:project]&.id, class_name: filter_name)

        # Some work may already have been completed before the timeout,
        # so hand back the (possibly partial) document.
        doc
      end

      # Subclasses implement their actual filtering here.
      def call_with_timeout
        raise NotImplementedError
      end

      private

      # Counter tracking how often Banzai filters time out.
      def timeout_counter
        Gitlab::Metrics.counter(:banzai_filter_timeouts_total, 'Count of the Banzai filters that time out')
      end
    end
  end
end

View File

@ -2,7 +2,7 @@
module Gitlab
module Git
class CrossRepoComparer
class CrossRepo
attr_reader :source_repo, :target_repo
def initialize(source_repo, target_repo)
@ -10,15 +10,8 @@ module Gitlab
@target_repo = target_repo
end
def compare(source_ref, target_ref, straight:)
ensuring_ref_in_source(target_ref) do |target_commit_id|
Gitlab::Git::Compare.new(
source_repo,
target_commit_id,
source_ref,
straight: straight
)
end
def execute(target_ref, &blk)
ensuring_ref_in_source(target_ref, &blk)
end
private

View File

@ -823,9 +823,14 @@ module Gitlab
end
def compare_source_branch(target_branch_name, source_repository, source_branch_name, straight:)
CrossRepoComparer
.new(source_repository, self)
.compare(source_branch_name, target_branch_name, straight: straight)
CrossRepo.new(source_repository, self).execute(target_branch_name) do |target_commit_id|
Gitlab::Git::Compare.new(
source_repository,
target_commit_id,
source_branch_name,
straight: straight
)
end
end
def write_ref(ref_path, ref, old_ref: nil)

View File

@ -48,6 +48,9 @@ module Gitlab
attrs[:GitConfigOptions] << "receive.maxInputSize=#{receive_max_input_size.megabytes}"
end
remote_ip = Gitlab::ApplicationContext.current_context_attribute(:remote_ip)
attrs[:RemoteIP] = remote_ip if remote_ip.present?
attrs
end

View File

@ -51,9 +51,12 @@ namespace :gitlab do
desc 'GitLab | UsageDataMetrics | Generate raw SQL metrics queries for RSpec'
task generate_sql_metrics_queries: :environment do
require 'active_support/testing/time_helpers'
include ActiveSupport::Testing::TimeHelpers
path = Rails.root.join('tmp', 'test')
queries = Timecop.freeze(2021, 1, 1) do
queries = travel_to(Time.utc(2021, 1, 1)) do
Gitlab::Usage::ServicePingReport.for(output: :metrics_queries)
end

View File

@ -4551,6 +4551,9 @@ msgstr ""
msgid "An error occurred. Please try again."
msgstr ""
msgid "An error occurred. Unable to reopen this merge request."
msgstr ""
msgid "An example project for managing Kubernetes clusters integrated with GitLab"
msgstr ""
@ -33684,6 +33687,9 @@ msgid_plural "Refreshing in %d seconds to show the updated status..."
msgstr[0] ""
msgstr[1] ""
msgid "Refreshing..."
msgstr ""
msgid "Regenerate export"
msgstr ""
@ -34220,6 +34226,9 @@ msgstr ""
msgid "Reopened this %{quick_action_target}."
msgstr ""
msgid "Reopening..."
msgstr ""
msgid "Reopens this %{quick_action_target}."
msgstr ""

View File

@ -4,25 +4,29 @@ require 'spec_helper'
RSpec.describe 'Active user sessions', :clean_gitlab_redis_sessions do
it 'successful login adds a new active user login' do
user = create(:user)
now = Time.zone.parse('2018-03-12 09:06')
Timecop.freeze(now) do
user = create(:user)
travel_to(now) do
gitlab_sign_in(user)
expect(page).to have_current_path root_path, ignore_query: true
sessions = ActiveSession.list(user)
expect(sessions.count).to eq 1
gitlab_sign_out
end
# refresh the current page updates the updated_at
Timecop.freeze(now + 1.minute) do
visit current_path
# refresh the current page updates the updated_at
travel_to(now + 1.minute) do
gitlab_sign_in(user)
sessions = ActiveSession.list(user)
expect(sessions.first).to have_attributes(
created_at: Time.zone.parse('2018-03-12 09:06'),
updated_at: Time.zone.parse('2018-03-12 09:07')
)
end
visit current_path
sessions = ActiveSession.list(user)
expect(sessions.first).to have_attributes(
created_at: Time.zone.parse('2018-03-12 09:06'),
updated_at: Time.zone.parse('2018-03-12 09:07')
)
end
end

View File

@ -2,8 +2,8 @@ import { GlFilteredSearchToken } from '@gitlab/ui';
import { keyBy } from 'lodash';
import { ListType } from '~/boards/constants';
import {
OPERATOR_IS_AND_IS_NOT,
OPERATOR_IS_ONLY,
OPERATORS_IS_NOT,
OPERATORS_IS,
TOKEN_TITLE_ASSIGNEE,
TOKEN_TITLE_AUTHOR,
TOKEN_TITLE_LABEL,
@ -747,7 +747,7 @@ export const mockConfidentialToken = {
title: 'Confidential',
unique: true,
token: GlFilteredSearchToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
options: [
{ icon: 'eye-slash', value: 'yes', title: 'Yes' },
{ icon: 'eye', value: 'no', title: 'No' },
@ -759,7 +759,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, isSignedI
icon: 'user',
title: TOKEN_TITLE_ASSIGNEE,
type: TOKEN_TYPE_ASSIGNEE,
operators: OPERATOR_IS_AND_IS_NOT,
operators: OPERATORS_IS_NOT,
token: AuthorToken,
unique: true,
fetchAuthors,
@ -769,7 +769,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, isSignedI
icon: 'pencil',
title: TOKEN_TITLE_AUTHOR,
type: TOKEN_TYPE_AUTHOR,
operators: OPERATOR_IS_AND_IS_NOT,
operators: OPERATORS_IS_NOT,
symbol: '@',
token: AuthorToken,
unique: true,
@ -780,7 +780,7 @@ export const mockTokens = (fetchLabels, fetchAuthors, fetchMilestones, isSignedI
icon: 'labels',
title: TOKEN_TITLE_LABEL,
type: TOKEN_TYPE_LABEL,
operators: OPERATOR_IS_AND_IS_NOT,
operators: OPERATORS_IS_NOT,
token: LabelToken,
unique: false,
symbol: '~',

View File

@ -7,7 +7,7 @@ import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import TagToken, { TAG_SUGGESTIONS_PATH } from '~/ci/runner/components/search_tokens/tag_token.vue';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { getRecentlyUsedSuggestions } from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
jest.mock('~/flash');
@ -42,7 +42,7 @@ const mockTagTokenConfig = {
type: 'tag',
token: TagToken,
recentSuggestionsStorageKey: mockStorageKey,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
};
describe('TagToken', () => {

View File

@ -13,15 +13,6 @@ RSpec.describe 'Freeze Periods (JavaScript fixtures)' do
remove_repository(project)
end
around do |example|
freeze_time do
# Mock time to sept 19 (intl. talk like a pirate day)
travel_to(Time.utc(2020, 9, 19))
example.run
end
end
describe API::FreezePeriods, '(JavaScript fixtures)', type: :request do
include ApiHelpers

View File

@ -1,7 +1,7 @@
import {
FILTERED_SEARCH_TERM,
OPERATOR_IS,
OPERATOR_IS_NOT,
OPERATOR_NOT,
OPERATOR_OR,
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
@ -184,41 +184,41 @@ export const locationSearchWithSpecialValues = [
export const filteredTokens = [
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'homer', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'marge', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_AUTHOR, value: { data: 'marge', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'bart', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lisa', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: '5', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'patty', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'selma', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'patty', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'selma', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'carl', operator: OPERATOR_OR } },
{ type: TOKEN_TYPE_ASSIGNEE, value: { data: 'lenny', operator: OPERATOR_OR } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 3', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 4', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 20', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 30', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 20', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'season 30', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'cartoon', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'tv', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'live action', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'drama', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'live action', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'drama', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v3', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v4', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v20', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v30', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v20', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v30', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_TYPE, value: { data: 'issue', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_TYPE, value: { data: 'feature', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_TYPE, value: { data: 'bug', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_TYPE, value: { data: 'incident', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_TYPE, value: { data: 'bug', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_TYPE, value: { data: 'incident', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_MY_REACTION, value: { data: 'thumbsup', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_MY_REACTION, value: { data: 'thumbsdown', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_MY_REACTION, value: { data: 'thumbsdown', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_CONFIDENTIAL, value: { data: 'yes', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ITERATION, value: { data: '4', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ITERATION, value: { data: '12', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ITERATION, value: { data: '20', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_ITERATION, value: { data: '42', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_ITERATION, value: { data: '20', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_ITERATION, value: { data: '42', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_EPIC, value: { data: '12', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_EPIC, value: { data: '34', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_EPIC, value: { data: '34', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_WEIGHT, value: { data: '1', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_WEIGHT, value: { data: '3', operator: OPERATOR_IS_NOT } },
{ type: TOKEN_TYPE_WEIGHT, value: { data: '3', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_CONTACT, value: { data: '123', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ORGANIZATION, value: { data: '456', operator: OPERATOR_IS } },
{ type: FILTERED_SEARCH_TERM, value: { data: 'find' } },

View File

@ -1,6 +1,6 @@
import { GlFilteredSearch } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import JobsFilteredSearch from '~/jobs/components/filtered_search/jobs_filtered_search.vue';
import { mockFailedSearchToken } from '../../mock_data';
@ -41,7 +41,7 @@ describe('Jobs filtered search', () => {
icon: 'status',
title: 'Status',
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
});
});

View File

@ -8,7 +8,7 @@ import ArtifactsList from '~/packages_and_registries/harbor_registry/components/
import waitForPromises from 'helpers/wait_for_promises';
import DetailsHeader from '~/packages_and_registries/harbor_registry/components/details/details_header.vue';
import PersistedSearch from '~/packages_and_registries/shared/components/persisted_search.vue';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import {
NAME_SORT_FIELD,
TOKEN_TYPE_TAG_NAME,
@ -137,7 +137,7 @@ describe('Harbor Details Page', () => {
title: s__('HarborRegistry|Tag'),
unique: true,
token: GlFilteredSearchToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
},
],
});

View File

@ -6,7 +6,7 @@ import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
import PipelinesFilteredSearch from '~/pipelines/components/pipelines_list/pipelines_filtered_search.vue';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import { TRACKING_CATEGORIES } from '~/pipelines/constants';
import { users, mockSearch, branches, tags } from '../mock_data';
@ -63,7 +63,7 @@ describe('Pipelines filtered search', () => {
title: 'Trigger author',
unique: true,
projectId: '21',
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
});
expect(findBranchToken()).toMatchObject({
@ -73,7 +73,7 @@ describe('Pipelines filtered search', () => {
unique: true,
projectId: '21',
defaultBranchName: 'main',
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
});
expect(findSourceToken()).toMatchObject({
@ -81,7 +81,7 @@ describe('Pipelines filtered search', () => {
icon: 'trigger-source',
title: 'Source',
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
});
expect(findStatusToken()).toMatchObject({
@ -89,7 +89,7 @@ describe('Pipelines filtered search', () => {
icon: 'status',
title: 'Status',
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
});
expect(findTagToken()).toMatchObject({
@ -97,7 +97,7 @@ describe('Pipelines filtered search', () => {
icon: 'tag',
title: 'Tag name',
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
});
});

View File

@ -1,9 +1,28 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { shallowMount, mount } from '@vue/test-utils';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';
import api from '~/api';
import showGlobalToast from '~/vue_shared/plugins/global_toast';
import closedComponent from '~/vue_merge_request_widget/components/states/mr_widget_closed.vue';
import MrWidgetAuthorTime from '~/vue_merge_request_widget/components/mr_widget_author_time.vue';
import StateContainer from '~/vue_merge_request_widget/components/state_container.vue';
import Actions from '~/vue_merge_request_widget/components/action_buttons.vue';
import { MR_WIDGET_CLOSED_REOPEN_FAILURE } from '~/vue_merge_request_widget/i18n';
jest.mock('~/api', () => ({
updateMergeRequest: jest.fn(),
}));
jest.mock('~/vue_shared/plugins/global_toast');
useMockLocationHelper();
const MOCK_DATA = {
iid: 1,
metrics: {
mergedBy: {},
closedBy: {
@ -19,22 +38,39 @@ const MOCK_DATA = {
},
targetBranchPath: '/twitter/flight/commits/so_long_jquery',
targetBranch: 'so_long_jquery',
targetProjectId: 'twitter/flight',
};
/**
 * Mounts the closed-MR widget component with MOCK_DATA merged over any
 * caller-supplied props.
 *
 * @param {Object} [options]
 * @param {boolean} [options.shallow=true] - use shallowMount instead of mount
 * @param {Object} [options.props={}] - extra props spread over the defaults
 * @returns {Wrapper} the mounted component wrapper
 */
function createComponent({ shallow = true, props = {} } = {}) {
  const mountFn = shallow ? shallowMount : mount;
  const propsData = { mr: MOCK_DATA, ...props };

  return mountFn(closedComponent, { propsData });
}
// Locates the Actions component rendered inside the widget's StateContainer.
function findActions(wrapper) {
  const stateContainer = wrapper.findComponent(StateContainer);

  return stateContainer.findComponent(Actions);
}
// Finds the "Reopen" action button inside the widget's actions area.
function findReopenActionButton(wrapper) {
  const actions = findActions(wrapper);

  return actions.find('button[data-testid="extension-actions-reopen-button"]');
}
describe('MRWidgetClosed', () => {
let wrapper;
beforeEach(() => {
wrapper = shallowMount(closedComponent, {
propsData: {
mr: MOCK_DATA,
},
});
wrapper = createComponent();
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
if (wrapper) {
wrapper.destroy();
}
});
it('renders closed icon', () => {
@ -51,4 +87,93 @@ describe('MRWidgetClosed', () => {
dateReadable: MOCK_DATA.metrics.readableClosedAt,
});
});
describe('actions', () => {
describe('reopen', () => {
beforeEach(() => {
window.gon = { current_user_id: 1 };
api.updateMergeRequest.mockResolvedValue(true);
wrapper = createComponent({ shallow: false });
});
it('shows the "reopen" button', () => {
expect(wrapper.findComponent(StateContainer).props().actions.length).toBe(1);
expect(findReopenActionButton(wrapper).text()).toBe('Reopen');
});
it('does not show widget actions when the user is not logged in', () => {
window.gon = {};
wrapper = createComponent();
expect(findActions(wrapper).exists()).toBe(false);
});
it('makes the reopen request with the correct MR information', async () => {
const reopenButton = findReopenActionButton(wrapper);
reopenButton.trigger('click');
await nextTick();
expect(api.updateMergeRequest).toHaveBeenCalledWith(
MOCK_DATA.targetProjectId,
MOCK_DATA.iid,
{ state_event: 'reopen' },
);
});
it('shows "Reopening..." while the reopen network request is pending', async () => {
const reopenButton = findReopenActionButton(wrapper);
api.updateMergeRequest.mockReturnValue(new Promise(() => {}));
reopenButton.trigger('click');
await nextTick();
expect(reopenButton.text()).toBe('Reopening...');
});
it('shows "Refreshing..." when the reopen has succeeded', async () => {
const reopenButton = findReopenActionButton(wrapper);
reopenButton.trigger('click');
await waitForPromises();
expect(reopenButton.text()).toBe('Refreshing...');
});
it('reloads the page when a reopen has succeeded', async () => {
const reopenButton = findReopenActionButton(wrapper);
reopenButton.trigger('click');
await waitForPromises();
expect(window.location.reload).toHaveBeenCalledTimes(1);
});
it('shows "Reopen" when a reopen request has failed', async () => {
const reopenButton = findReopenActionButton(wrapper);
api.updateMergeRequest.mockRejectedValue(false);
reopenButton.trigger('click');
await waitForPromises();
expect(window.location.reload).not.toHaveBeenCalled();
expect(reopenButton.text()).toBe('Reopen');
});
it('requests a toast popup when a reopen request has failed', async () => {
const reopenButton = findReopenActionButton(wrapper);
api.updateMergeRequest.mockRejectedValue(false);
reopenButton.trigger('click');
await waitForPromises();
expect(showGlobalToast).toHaveBeenCalledTimes(1);
expect(showGlobalToast).toHaveBeenCalledWith(MR_WIDGET_CLOSED_REOPEN_FAILURE);
});
});
});
});

View File

@ -1,7 +1,7 @@
import { GlFilteredSearchToken } from '@gitlab/ui';
import { mockLabels } from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
import Api from '~/api';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import BranchToken from '~/vue_shared/components/filtered_search_bar/tokens/branch_token.vue';
import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
@ -202,7 +202,7 @@ export const mockBranchToken = {
title: 'Source Branch',
unique: true,
token: BranchToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchBranches: Api.branches.bind(Api),
};
@ -213,7 +213,7 @@ export const mockAuthorToken = {
unique: false,
symbol: '@',
token: AuthorToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchPath: 'gitlab-org/gitlab-test',
fetchAuthors: Api.projectUsers.bind(Api),
};
@ -225,7 +225,7 @@ export const mockLabelToken = {
unique: false,
symbol: '~',
token: LabelToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchLabels: () => Promise.resolve(mockLabels),
};
@ -236,7 +236,7 @@ export const mockMilestoneToken = {
unique: true,
symbol: '%',
token: MilestoneToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchMilestones: () => Promise.resolve({ data: mockMilestones }),
};
@ -254,7 +254,7 @@ export const mockReactionEmojiToken = {
title: 'My-Reaction',
unique: true,
token: EmojiToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchEmojis: () => Promise.resolve(mockEmojis),
};
@ -265,7 +265,7 @@ export const mockCrmContactToken = {
token: CrmContactToken,
isProject: false,
fullPath: 'group',
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
unique: true,
};
@ -276,7 +276,7 @@ export const mockCrmOrganizationToken = {
token: CrmOrganizationToken,
isProject: false,
fullPath: 'group',
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
unique: true,
};
@ -286,7 +286,7 @@ export const mockMembershipToken = {
title: 'Membership',
token: GlFilteredSearchToken,
unique: true,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
options: [
{ value: 'exclude', title: 'Direct' },
{ value: 'only', title: 'Inherited' },

View File

@ -17,7 +17,7 @@ import {
import {
DEFAULT_NONE_ANY,
OPERATOR_IS,
OPERATOR_IS_NOT,
OPERATOR_NOT,
} from '~/vue_shared/components/filtered_search_bar/constants';
import {
getRecentlyUsedSuggestions,
@ -301,9 +301,9 @@ describe('BaseToken', () => {
describe('with default suggestions', () => {
describe.each`
operator | shouldRenderFilteredSearchSuggestion
${OPERATOR_IS} | ${true}
${OPERATOR_IS_NOT} | ${false}
operator | shouldRenderFilteredSearchSuggestion
${OPERATOR_IS} | ${true}
${OPERATOR_NOT} | ${false}
`('when operator is $operator', ({ shouldRenderFilteredSearchSuggestion, operator }) => {
beforeEach(() => {
const props = {

View File

@ -2,7 +2,7 @@ import { GlAlert, GlBadge, GlPagination, GlTabs, GlTab } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import Tracking from '~/tracking';
import { OPERATOR_IS_ONLY } from '~/vue_shared/components/filtered_search_bar/constants';
import { OPERATORS_IS } from '~/vue_shared/components/filtered_search_bar/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import PageWrapper from '~/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue';
@ -293,7 +293,7 @@ describe('AlertManagementEmptyState', () => {
unique: true,
symbol: '@',
token: AuthorToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchPath: '/link',
fetchAuthors: expect.any(Function),
},
@ -304,7 +304,7 @@ describe('AlertManagementEmptyState', () => {
unique: true,
symbol: '@',
token: AuthorToken,
operators: OPERATOR_IS_ONLY,
operators: OPERATORS_IS,
fetchPath: '/link',
fetchAuthors: expect.any(Function),
},

View File

@ -192,4 +192,8 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
include_examples "XSS prevention", "ruby"
end
it_behaves_like "filter timeout" do
let(:text) { '<pre lang="ruby"><code>def fun end</code></pre>' }
end
end

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true
require 'spec_helper'
# Covers the shared "filter timeout" behaviour and the abstract contract of
# Banzai::Filter::TimeoutHtmlPipelineFilter.
RSpec.describe Banzai::Filter::TimeoutHtmlPipelineFilter do
  include FilterSpecHelper

  it_behaves_like 'filter timeout' do
    let(:text) { '<p>some text</p>' }
  end

  # The base class leaves #call_with_timeout abstract, so invoking the
  # filter directly must raise.
  it 'raises NotImplementedError' do
    expect { filter('test') }.to raise_error NotImplementedError
  end

  context 'when markup_rendering_timeout is disabled' do
    it 'waits until the execution completes' do
      stub_feature_flags(markup_rendering_timeout: false)

      html = '<p>some text</p>'
      allow_next_instance_of(described_class) do |instance|
        allow(instance).to receive(:call_with_timeout) { html }
      end

      # With the flag off, the timeout wrapper must never be engaged.
      expect(Gitlab::RenderTimeout).not_to receive(:timeout)

      expect(filter(html)).to eq html
    end
  end
end

View File

@ -22,10 +22,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
project.add_maintainer(user)
mr1.metrics.update!(merged_at: 1.month.ago)
mr2.metrics.update!(merged_at: Time.now)
end
around do |example|
Timecop.freeze { example.run }
freeze_time
end
describe 'date range parameters' do

View File

@ -18,10 +18,6 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do
subject { described_class.new(stage: stage, query: query).seconds }
around do |example|
Timecop.freeze { example.run }
end
it 'retruns nil when no results' do
expect(subject).to eq(nil)
end
@ -30,11 +26,11 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do
merge_request1 = create(:merge_request, source_branch: '1', target_project: project, source_project: project)
merge_request2 = create(:merge_request, source_branch: '2', target_project: project, source_project: project)
travel_to(5.minutes.from_now) do
travel(5.minutes) do
merge_request1.metrics.update!(merged_at: Time.zone.now)
end
travel_to(10.minutes.from_now) do
travel(10.minutes) do
merge_request2.metrics.update!(merged_at: Time.zone.now)
end

View File

@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
around do |example|
Timecop.freeze { example.run }
freeze_time { example.run }
end
let(:params) { { from: 1.year.ago, current_user: user } }

View File

@ -22,14 +22,14 @@ RSpec.describe Gitlab::Auth::UniqueIpsLimiter, :clean_gitlab_redis_shared_state
end
it 'resets count after specified time window' do
Timecop.freeze do
freeze_time do
expect(described_class.update_and_return_ips_count(user.id, 'ip2')).to eq(1)
expect(described_class.update_and_return_ips_count(user.id, 'ip3')).to eq(2)
end
travel_to(Time.now.utc + described_class.config.unique_ips_limit_time_window) do
expect(described_class.update_and_return_ips_count(user.id, 'ip4')).to eq(1)
expect(described_class.update_and_return_ips_count(user.id, 'ip5')).to eq(2)
end
travel_to(Time.now.utc + described_class.config.unique_ips_limit_time_window) do
expect(described_class.update_and_return_ips_count(user.id, 'ip4')).to eq(1)
expect(described_class.update_and_return_ips_count(user.id, 'ip5')).to eq(2)
end
end
end

View File

@ -17,38 +17,44 @@ RSpec.describe Gitlab::Checks::TimedLogger do
logger.append_message("Checking ref: #{ref}")
end
around do |example|
freeze_time do
example.run
end
end
describe '#log_timed' do
it 'logs message' do
Timecop.freeze(start + 30.seconds) do
logger.log_timed(log_messages[:foo], start) { bar_check }
end
travel_to(start + 30.seconds)
logger.log_timed(log_messages[:foo], start) { bar_check }
expect(logger.full_message).to eq("Checking ref: bar\nFoo message... (30000.0ms)")
end
context 'when time limit was reached' do
it 'cancels action' do
Timecop.freeze(start + 50.seconds) do
expect do
logger.log_timed(log_messages[:foo], start) do
bar_check
end
end.to raise_error(described_class::TimeoutError)
end
travel_to(start + 50.seconds)
expect do
logger.log_timed(log_messages[:foo], start) do
bar_check
end
end.to raise_error(described_class::TimeoutError)
expect(logger.full_message).to eq("Checking ref: bar\nFoo message... (cancelled)")
end
it 'cancels action with time elapsed if work was performed' do
Timecop.freeze(start + 30.seconds) do
expect do
logger.log_timed(log_messages[:foo], start) do
grpc_check
end
end.to raise_error(described_class::TimeoutError)
travel_to(start + 30.seconds)
expect(logger.full_message).to eq("Checking ref: bar\nFoo message... (cancelled after 30000.0ms)")
end
expect do
logger.log_timed(log_messages[:foo], start) do
grpc_check
end
end.to raise_error(described_class::TimeoutError)
expect(logger.full_message).to eq("Checking ref: bar\nFoo message... (cancelled after 30000.0ms)")
end
end
end

View File

@ -29,8 +29,8 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
context 'when from date is given' do
before do
Timecop.freeze(5.days.ago) { create(:issue, project: project) }
Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
travel_to(5.days.ago) { create(:issue, project: project) }
travel_to(5.days.from_now) { create(:issue, project: project) }
end
it "finds the number of issues created after the 'from date'" do
@ -45,15 +45,15 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
end
it "doesn't find issues from other projects" do
Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project)) }
travel_to(5.days.from_now) { create(:issue, project: create(:project)) }
expect(subject[:value]).to eq('-')
end
context 'when `to` parameter is given' do
before do
Timecop.freeze(5.days.ago) { create(:issue, project: project) }
Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
travel_to(5.days.ago) { create(:issue, project: project) }
travel_to(5.days.from_now) { create(:issue, project: project) }
end
it "doesn't find any record" do
@ -78,8 +78,8 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
context 'when from date is given' do
before do
Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
travel_to(5.days.ago) { create_commit("Test message", project, user, 'master') }
travel_to(5.days.from_now) { create_commit("Test message", project, user, 'master') }
end
it "finds the number of commits created after the 'from date'" do
@ -94,21 +94,21 @@ RSpec.describe Gitlab::CycleAnalytics::StageSummary do
end
it "doesn't find commits from other projects" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", create(:project, :repository), user, 'master') }
travel_to(5.days.from_now) { create_commit("Test message", create(:project, :repository), user, 'master') }
expect(subject[:value]).to eq('-')
end
it "finds a large (> 100) number of commits if present" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master', count: 100) }
travel_to(5.days.from_now) { create_commit("Test message", project, user, 'master', count: 100) }
expect(subject[:value]).to eq('100')
end
context 'when `to` parameter is given' do
before do
Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
travel_to(5.days.ago) { create_commit("Test message", project, user, 'master') }
travel_to(5.days.from_now) { create_commit("Test message", project, user, 'master') }
end
it "doesn't find any record" do

View File

@ -1,117 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Git::CrossRepoComparer do
let(:source_project) { create(:project, :repository) }
let(:target_project) { create(:project, :repository) }
let(:source_repo) { source_project.repository.raw_repository }
let(:target_repo) { target_project.repository.raw_repository }
let(:source_branch) { 'feature' }
let(:target_branch) { 'master' }
let(:straight) { false }
let(:source_commit) { source_repo.commit(source_branch) }
let(:target_commit) { source_repo.commit(target_branch) }
subject(:result) { described_class.new(source_repo, target_repo).compare(source_branch, target_branch, straight: straight) }
describe '#compare' do
context 'within a single repository' do
let(:target_project) { source_project }
context 'a non-straight comparison' do
it 'compares without fetching from another repo' do
expect(source_repo).not_to receive(:fetch_source_branch!)
expect_compare(result, from: source_commit, to: target_commit)
expect(result.straight).to eq(false)
end
end
context 'a straight comparison' do
let(:straight) { true }
it 'compares without fetching from another repo' do
expect(source_repo).not_to receive(:fetch_source_branch!)
expect_compare(result, from: source_commit, to: target_commit)
expect(result.straight).to eq(true)
end
end
end
context 'across two repositories' do
context 'target ref exists in source repo' do
it 'compares without fetching from another repo' do
expect(source_repo).not_to receive(:fetch_source_branch!)
expect(source_repo).not_to receive(:delete_refs)
expect_compare(result, from: source_commit, to: target_commit)
end
end
context 'target ref does not exist in source repo' do
it 'compares in the source repo by fetching from the target to a temporary ref' do
new_commit_id = create_commit(target_project.owner, target_repo, target_branch)
new_commit = target_repo.commit(new_commit_id)
# This is how the temporary ref is generated
expect(SecureRandom).to receive(:hex).at_least(:once).and_return('foo')
expect(source_repo)
.to receive(:fetch_source_branch!)
.with(target_repo, new_commit_id, 'refs/tmp/foo')
.and_call_original
expect(source_repo).to receive(:delete_refs).with('refs/tmp/foo').and_call_original
expect_compare(result, from: source_commit, to: new_commit)
end
end
context 'source ref does not exist in source repo' do
let(:source_branch) { 'does-not-exist' }
it 'returns an empty comparison' do
expect(source_repo).not_to receive(:fetch_source_branch!)
expect(source_repo).not_to receive(:delete_refs)
expect(result).to be_a(::Gitlab::Git::Compare)
expect(result.commits.size).to eq(0)
end
end
context 'target ref does not exist in target repo' do
let(:target_branch) { 'does-not-exist' }
it 'returns nil' do
expect(source_repo).not_to receive(:fetch_source_branch!)
expect(source_repo).not_to receive(:delete_refs)
is_expected.to be_nil
end
end
end
end
def expect_compare(of, from:, to:)
expect(of).to be_a(::Gitlab::Git::Compare)
expect(from).to be_a(::Gitlab::Git::Commit)
expect(to).to be_a(::Gitlab::Git::Commit)
expect(of.commits).not_to be_empty
expect(of.head).to eq(from)
expect(of.base).to eq(to)
end
def create_commit(user, repo, branch)
action = { action: :create, file_path: '/FILE', content: 'content' }
result = repo.commit_files(user, branch_name: branch, message: 'Commit', actions: [action])
result.newrev
end
end

View File

@ -0,0 +1,83 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Git::CrossRepo do
let_it_be(:source_project) { create(:project, :repository) }
let_it_be(:target_project) { create(:project, :repository) }
let(:source_repo) { source_project.repository.raw_repository }
let(:target_repo) { target_project.repository.raw_repository }
let(:source_branch) { 'feature' }
let(:target_branch) { target_repo.root_ref }
let(:source_commit) { source_repo.commit(source_branch) }
let(:target_commit) { source_repo.commit(target_branch) }
def execute(&block)
described_class.new(source_repo, target_repo).execute(target_branch, &block)
end
describe '#execute' do
context 'when executed within a single repository' do
let(:target_project) { source_project }
it 'does not fetch from another repo' do
expect(source_repo).not_to receive(:fetch_source_branch!)
expect { |block| execute(&block) }.to yield_with_args(target_branch)
end
end
context 'when executed across two repositories' do
context 'and target ref exists in source repo' do
it 'does not fetch from another repo' do
expect(source_repo).not_to receive(:fetch_source_branch!)
expect(source_repo).not_to receive(:delete_refs)
expect { |block| execute(&block) }.to yield_with_args(target_commit.id)
end
end
context 'and target ref does not exist in source repo' do
let_it_be(:target_project) { create(:project, :repository) }
it 'fetches from the target to a temporary ref' do
new_commit_id = create_commit(target_project.owner, target_repo, target_branch)
# This is how the temporary ref is generated
expect(SecureRandom).to receive(:hex).at_least(:once).and_return('foo')
expect(source_repo)
.to receive(:fetch_source_branch!)
.with(target_repo, new_commit_id, 'refs/tmp/foo')
.and_call_original
expect(source_repo).to receive(:delete_refs).with('refs/tmp/foo').and_call_original
expect { |block| execute(&block) }.to yield_with_args(new_commit_id)
end
end
context 'and target ref does not exist in target repo' do
let(:target_branch) { 'does-not-exist' }
it 'returns nil' do
expect(source_repo).not_to receive(:fetch_source_branch!)
expect(source_repo).not_to receive(:delete_refs)
expect { |block| execute(&block) }.not_to yield_control
end
end
end
end
def create_commit(user, repo, branch)
action = { action: :create, file_path: '/FILE', content: 'content' }
result = repo.commit_files(user, branch_name: branch, message: 'Commit', actions: [action])
result.newrev
end
end

View File

@ -2211,15 +2211,49 @@ RSpec.describe Gitlab::Git::Repository do
end
describe '#compare_source_branch' do
it 'delegates to Gitlab::Git::CrossRepoComparer' do
expect_next_instance_of(::Gitlab::Git::CrossRepoComparer) do |instance|
expect(instance.source_repo).to eq(:source_repository)
expect(instance.target_repo).to eq(repository)
it 'compares two branches cross repo' do
mutable_repository.commit_files(
user,
branch_name: mutable_repository.root_ref, message: 'Committing something',
actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'New file' }]
)
expect(instance).to receive(:compare).with('feature', 'master', straight: :straight)
repository.commit_files(
user,
branch_name: repository.root_ref, message: 'Commit to root ref',
actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'One more' }]
)
[
[repository, mutable_repository, true],
[repository, mutable_repository, false],
[mutable_repository, repository, true],
[mutable_repository, repository, false]
].each do |source_repo, target_repo, straight|
raw_compare = target_repo.compare_source_branch(
target_repo.root_ref, source_repo, source_repo.root_ref, straight: straight)
expect(raw_compare).to be_a(::Gitlab::Git::Compare)
expect(raw_compare.commits).to eq([source_repo.commit])
expect(raw_compare.head).to eq(source_repo.commit)
expect(raw_compare.base).to eq(target_repo.commit)
expect(raw_compare.straight).to eq(straight)
end
end
repository.compare_source_branch('master', :source_repository, 'feature', straight: :straight)
context 'source ref does not exist in source repo' do
it 'returns an empty comparison' do
expect_next_instance_of(::Gitlab::Git::CrossRepo) do |instance|
expect(instance).not_to receive(:fetch_source_branch!)
end
raw_compare = repository.compare_source_branch(
repository.root_ref, mutable_repository, 'does-not-exist', straight: true)
expect(raw_compare).to be_a(::Gitlab::Git::Compare)
expect(raw_compare.commits.size).to eq(0)
end
end
end

View File

@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::PumaLogging::JSONFormatter do
it "generate json format with timestamp and pid" do
Timecop.freeze( Time.utc(2019, 12, 04, 9, 10, 11, 123456)) do
expect(subject.call('log message')).to eq "{\"timestamp\":\"2019-12-04T09:10:11.123Z\",\"pid\":#{Process.pid},\"message\":\"log message\"}"
travel_to(Time.utc(2019, 12, 04, 9, 10, 11)) do
expect(subject.call('log message')).to eq "{\"timestamp\":\"2019-12-04T09:10:11.000Z\",\"pid\":#{Process.pid},\"message\":\"log message\"}"
end
end
end

View File

@ -349,6 +349,23 @@ RSpec.describe Gitlab::Workhorse do
expect(subject[:GitConfigOptions]).to be_empty
end
end
context 'when remote_ip is available in the application context' do
it 'includes a RemoteIP params' do
result = {}
Gitlab::ApplicationContext.with_context(remote_ip: "1.2.3.4") do
result = described_class.git_http_ok(repository, Gitlab::GlRepository::PROJECT, user, action)
end
expect(result[:RemoteIP]).to eql("1.2.3.4")
end
end
context 'when remote_ip is not available in the application context' do
it 'does not include RemoteIP params' do
result = described_class.git_http_ok(repository, Gitlab::GlRepository::PROJECT, user, action)
expect(result).not_to have_key(:RemoteIP)
end
end
end
describe '.set_key_and_notify' do

View File

@ -1,9 +1,11 @@
# frozen_string_literal: true
require 'json'
require 'timecop'
require 'active_support/testing/time_helpers'
RSpec.describe JSONWebToken::HMACToken do
include ActiveSupport::Testing::TimeHelpers
let(:secret) { 'shh secret squirrel' }
shared_examples 'a valid, non-expired token' do
@ -54,13 +56,13 @@ RSpec.describe JSONWebToken::HMACToken do
end
context 'that is expired' do
# Needs the ! so Timecop.freeze() is effective
# Needs the ! so freeze_time() is effective
let!(:encoded_token) { described_class.new(secret).encoded }
it "raises exception saying 'Signature has expired'" do
# Needs to be 120 seconds, because the default expiry is 60 seconds
# with an additional 60 second leeway.
Timecop.freeze(Time.now + 120) do
travel_to(Time.now + 120) do
expect { decoded_token }.to raise_error(JWT::ExpiredSignature, 'Signature has expired')
end
end
@ -77,19 +79,19 @@ RSpec.describe JSONWebToken::HMACToken do
context 'that has expired' do
let(:expire_time) { 0 }
around do |example|
travel_to(Time.now + 1) { example.run }
end
context 'with the default leeway' do
Timecop.freeze(Time.now + 1) do
it_behaves_like 'a valid, non-expired token'
end
it_behaves_like 'a valid, non-expired token'
end
context 'with a leeway of 0 seconds' do
let(:leeway) { 0 }
it "raises exception saying 'Signature has expired'" do
Timecop.freeze(Time.now + 1) do
expect { decoded_token }.to raise_error(JWT::ExpiredSignature, 'Signature has expired')
end
expect { decoded_token }.to raise_error(JWT::ExpiredSignature, 'Signature has expired')
end
end
end

View File

@ -61,7 +61,7 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
end
it 'includes db role data and db_config_name name' do
Timecop.freeze(2021, 2, 23, 10, 0) do
travel_to(Time.utc(2021, 2, 23, 10, 0)) do
ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 1.second, '1', event_1)
ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 2.seconds, '2', event_2)
ActiveSupport::Notifications.publish('sql.active_record', Time.current, Time.current + 3.seconds, '3', event_3)

View File

@ -3010,44 +3010,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
describe '#uses_external_project_ci_config?' do
subject(:uses_external_project_ci_config) { project.uses_external_project_ci_config? }
let(:project) { build(:project) }
context 'when ci_config_path is configured with external project' do
before do
project.ci_config_path = '.gitlab-ci.yml@hello/world'
end
it { is_expected.to eq(true) }
end
context 'when ci_config_path is nil' do
before do
project.ci_config_path = nil
end
it { is_expected.to eq(false) }
end
context 'when ci_config_path is configured with a file in the project' do
before do
project.ci_config_path = 'hello/world/gitlab-ci.yml'
end
it { is_expected.to eq(false) }
end
context 'when ci_config_path is configured with remote file' do
before do
project.ci_config_path = 'https://example.org/file.yml'
end
it { is_expected.to eq(false) }
end
end
describe '#latest_successful_build_for_ref' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create_pipeline(project) }
@ -7518,15 +7480,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
describe '#ci_config_external_project' do
subject(:ci_config_external_project) { project.ci_config_external_project }
let(:other_project) { create(:project) }
let(:project) { build(:project, ci_config_path: ".gitlab-ci.yml@#{other_project.full_path}") }
it { is_expected.to eq(other_project) }
end
describe '#enabled_group_deploy_keys' do
let_it_be(:project) { create(:project) }

View File

@ -61,6 +61,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['inactive_projects_min_size_mb']).to eq(0)
expect(json_response['inactive_projects_send_warning_email_after_months']).to eq(1)
expect(json_response['can_create_group']).to eq(true)
expect(json_response['jira_connect_application_key']).to eq(nil)
expect(json_response['jira_connect_proxy_url']).to eq(nil)
end
end
@ -158,7 +160,9 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
inactive_projects_delete_after_months: 24,
inactive_projects_min_size_mb: 10,
inactive_projects_send_warning_email_after_months: 12,
can_create_group: false
can_create_group: false,
jira_connect_application_key: '123',
jira_connect_proxy_url: 'http://example.com'
}
expect(response).to have_gitlab_http_status(:ok)
@ -220,6 +224,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['inactive_projects_min_size_mb']).to eq(10)
expect(json_response['inactive_projects_send_warning_email_after_months']).to eq(12)
expect(json_response['can_create_group']).to eq(false)
expect(json_response['jira_connect_application_key']).to eq('123')
expect(json_response['jira_connect_proxy_url']).to eq('http://example.com')
end
end

View File

@ -80,7 +80,7 @@ RSpec.describe API::UsageDataQueries do
end
it 'matches the generated query' do
Timecop.freeze(2021, 1, 1) do
travel_to(Time.utc(2021, 1, 1)) do
get api(endpoint, admin)
end

View File

@ -48,7 +48,7 @@ RSpec.describe EntityDateHelper do
describe '#remaining_days_in_words' do
around do |example|
Timecop.freeze(Time.utc(2017, 3, 17)) { example.run }
travel_to(Time.utc(2017, 3, 17)) { example.run }
end
context 'when less than 31 days remaining' do
@ -75,7 +75,9 @@ RSpec.describe EntityDateHelper do
end
it 'returns 1 day remaining when queried mid-day' do
Timecop.freeze(Time.utc(2017, 3, 17, 13, 10)) do
travel_back
travel_to(Time.utc(2017, 3, 17, 13, 10)) do
expect(milestone_remaining).to eq("<strong>1</strong> day remaining")
end
end

View File

@ -912,7 +912,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when outside freeze period' do
it 'creates two jobs' do
Timecop.freeze(2020, 4, 10, 22, 59) do
travel_to(Time.utc(2020, 4, 10, 22, 59)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('test-job', 'deploy-job')
end
@ -921,7 +921,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when inside freeze period' do
it 'creates one job' do
Timecop.freeze(2020, 4, 10, 23, 1) do
travel_to(Time.utc(2020, 4, 10, 23, 1)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('test-job')
end
@ -946,7 +946,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when outside freeze period' do
it 'creates two jobs' do
Timecop.freeze(2020, 4, 10, 22, 59) do
travel_to(Time.utc(2020, 4, 10, 22, 59)) do
expect(pipeline).to be_persisted
expect(build_names).to contain_exactly('deploy-job')
end
@ -955,7 +955,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
context 'when inside freeze period' do
it 'does not create the pipeline', :aggregate_failures do
Timecop.freeze(2020, 4, 10, 23, 1) do
travel_to(Time.utc(2020, 4, 10, 23, 1)) do
expect(response).to be_error
expect(pipeline).not_to be_persisted
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
# This shared_example requires the following variables:
# - text: The text to be run through the filter
#
# Usage:
#
# it_behaves_like 'filter timeout' do
# let(:text) { 'some text' }
# end
RSpec.shared_examples 'filter timeout' do
context 'when rendering takes too long' do
let_it_be(:project) { create(:project) }
let_it_be(:context) { { project: project } }
it 'times out' do
stub_const("Banzai::Filter::TimeoutHtmlPipelineFilter::RENDER_TIMEOUT", 0.1)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:call_with_timeout) do
sleep(0.2)
text
end
end
expect(Gitlab::RenderTimeout).to receive(:timeout).and_call_original
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
instance_of(Timeout::Error),
project_id: context[:project].id,
class_name: described_class.name.demodulize
)
result = filter(text)
expect(result.to_html).to eq text
end
end
end

View File

@ -42,17 +42,17 @@ module CycleAnalyticsHelpers
end_time = start_time + rand(1..5).days
start_time_conditions.each do |condition_name, condition_fn|
Timecop.freeze(start_time) { condition_fn[self, data] }
travel_to(start_time) { condition_fn[self, data] }
end
# Run `before_end_fn` at the midpoint between `start_time` and `end_time`
Timecop.freeze(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn
travel_to(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn
end_time_conditions.each do |condition_name, condition_fn|
Timecop.freeze(end_time) { condition_fn[self, data] }
travel_to(end_time) { condition_fn[self, data] }
end
Timecop.freeze(end_time + 1.day) { post_fn[self, data] } if post_fn
travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
end_time - start_time
end
@ -74,14 +74,14 @@ module CycleAnalyticsHelpers
end_time = rand(1..10).days.from_now
start_time_conditions.each do |condition_name, condition_fn|
Timecop.freeze(start_time) { condition_fn[self, data] }
travel_to(start_time) { condition_fn[self, data] }
end
end_time_conditions.each do |condition_name, condition_fn|
Timecop.freeze(end_time) { condition_fn[self, data] }
travel_to(end_time) { condition_fn[self, data] }
end
Timecop.freeze(end_time + 1.day) { post_fn[self, data] } if post_fn
travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
# Turn off the stub before checking assertions
allow(self).to receive(:project).and_call_original
@ -97,17 +97,17 @@ module CycleAnalyticsHelpers
end_time = start_time + rand(1..5).days
# Run `before_end_fn` at the midpoint between `start_time` and `end_time`
Timecop.freeze(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn
travel_to(start_time + (end_time - start_time) / 2) { before_end_fn[self, data] } if before_end_fn
end_time_conditions.each do |condition_name, condition_fn|
Timecop.freeze(start_time) { condition_fn[self, data] }
travel_to(start_time) { condition_fn[self, data] }
end
start_time_conditions.each do |condition_name, condition_fn|
Timecop.freeze(end_time) { condition_fn[self, data] }
travel_to(end_time) { condition_fn[self, data] }
end
Timecop.freeze(end_time + 1.day) { post_fn[self, data] } if post_fn
travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
expect(subject[phase].project_median).to be_nil
end
@ -122,10 +122,10 @@ module CycleAnalyticsHelpers
end_time = rand(1..10).days.from_now
end_time_conditions.each_with_index do |(_condition_name, condition_fn), index|
Timecop.freeze(end_time + index.days) { condition_fn[self, data] }
travel_to(end_time + index.days) { condition_fn[self, data] }
end
Timecop.freeze(end_time + 1.day) { post_fn[self, data] } if post_fn
travel_to(end_time + 1.day) { post_fn[self, data] } if post_fn
expect(subject[phase].project_median).to be_nil
end
@ -139,7 +139,7 @@ module CycleAnalyticsHelpers
start_time = Time.now
start_time_conditions.each do |condition_name, condition_fn|
Timecop.freeze(start_time) { condition_fn[self, data] }
travel_to(start_time) { condition_fn[self, data] }
end
post_fn[self, data] if post_fn

View File

@ -50,7 +50,7 @@ module Spec
page.within(search_bar_selector) do
click_on filter
# For OPERATOR_IS_ONLY, clicking the filter
# For OPERATORS_IS, clicking the filter
# immediately preselects "=" operator
page.find('input').send_keys(value)

View File

@ -3,12 +3,14 @@
require 'action_dispatch/testing/test_request'
require 'fileutils'
require 'graphlyte'
require 'active_support/testing/time_helpers'
require_relative '../../../lib/gitlab/popen'
module JavaScriptFixturesHelpers
extend ActiveSupport::Concern
include Gitlab::Popen
include ActiveSupport::Testing::TimeHelpers
extend self
@ -22,7 +24,7 @@ module JavaScriptFixturesHelpers
# pick an arbitrary date from the past, so tests are not time dependent
# Also see spec/frontend/__helpers__/fake_date/jest.js
Timecop.freeze(Time.utc(2015, 7, 3, 10)) { example.run }
travel_to(Time.utc(2015, 7, 3, 10)) { example.run }
raise NoMethodError.new('You need to set `response` for the fixture generator! This will automatically happen with `type: :controller` or `type: :request`.', 'response') unless respond_to?(:response)

View File

@ -6,7 +6,7 @@ RSpec.shared_context 'rack attack cache store' do
Rack::Attack.cache.store = ActiveSupport::Cache::MemoryStore.new
# Make time-dependent tests deterministic
Timecop.freeze { example.run }
freeze_time { example.run }
Rack::Attack.cache.store = Rails.cache
end

View File

@ -68,6 +68,7 @@ RSpec.shared_examples 'rate-limited token requests' do
# Set low limits
settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period
settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds
travel_back
end
after do
@ -220,6 +221,7 @@ RSpec.shared_examples 'rate-limited web authenticated requests' do
# Set low limits
settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period
settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds
travel_back
end
after do
@ -436,6 +438,7 @@ RSpec.shared_examples 'rate-limited unauthenticated requests' do
# Set low limits
settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period
settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds
travel_back
end
context 'when the throttle is enabled' do

View File

@ -10,23 +10,24 @@ RSpec.describe Metrics::Dashboard::PruneOldAnnotationsWorker do
describe '#perform' do
it 'removes all annotations older than cut off', :aggregate_failures do
Timecop.freeze(now) do
travel_to(now) do
described_class.new.perform
expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation, two_weeks_old_annotation])
# is idempotent in the scope of 24h
expect { described_class.new.perform }.not_to change { Metrics::Dashboard::Annotation.all.to_a }
travel_to(24.hours.from_now) do
described_class.new.perform
expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation])
end
end
travel_to(now + 24.hours) do
described_class.new.perform
expect(Metrics::Dashboard::Annotation.all).to match_array([one_day_old_annotation])
end
end
context 'batch to be deleted is bigger than upper limit' do
it 'schedules second job to clear remaining records' do
Timecop.freeze(now) do
travel_to(now) do
create(:metrics_dashboard_annotation, starting_at: 1.month.ago)
stub_const("#{described_class}::DELETE_LIMIT", 1)

View File

@ -272,6 +272,7 @@ func TestPostReceivePackProxiedToGitalySuccessfully(t *testing.T) {
require.Equal(t, apiResponse.Repository.RelativePath, gitalyRequest.Repository.RelativePath)
require.Equal(t, apiResponse.GL_ID, gitalyRequest.GlId)
require.Equal(t, apiResponse.GL_USERNAME, gitalyRequest.GlUsername)
require.Equal(t, apiResponse.RemoteIp, "1.2.3.4")
require.Equal(t, apiResponse.GitConfigOptions, gitalyRequest.GitConfigOptions)
require.Equal(t, gitProtocol, gitalyRequest.GitProtocol)

View File

@ -128,6 +128,10 @@ type Response struct {
// GL_REPOSITORY is an environment variable used by gitlab-shell hooks during
// 'git push' and 'git pull'
GL_REPOSITORY string
// RemoteIp holds the IP of the request issuing the action
RemoteIp string
// GitConfigOptions holds the custom options that we want to pass to the git command
GitConfigOptions []string
// StoreLFSPath is provided by the GitLab Rails application to mark where the tmp file should be placed.

View File

@ -59,8 +59,7 @@ func handleGetInfoRefsWithGitaly(ctx context.Context, responseWriter *HttpRespon
ctx,
a.GitalyServer,
gitaly.WithFeatures(a.GitalyServer.Features),
gitaly.WithUserID(a.GL_ID),
gitaly.WithUsername(a.GL_USERNAME),
gitaly.WithLoggingMetadata(a),
)
if err != nil {
return err

View File

@ -24,8 +24,7 @@ func handleReceivePack(w *HttpResponseWriter, r *http.Request, a *api.Response)
r.Context(),
a.GitalyServer,
gitaly.WithFeatures(a.GitalyServer.Features),
gitaly.WithUserID(a.GL_ID),
gitaly.WithUsername(a.GL_USERNAME),
gitaly.WithLoggingMetadata(a),
)
if err != nil {
return fmt.Errorf("smarthttp.ReceivePack: %v", err)

View File

@ -48,8 +48,7 @@ func handleUploadPackWithGitaly(ctx context.Context, a *api.Response, clientRequ
ctx,
a.GitalyServer,
gitaly.WithFeatures(a.GitalyServer.Features),
gitaly.WithUserID(a.GL_ID),
gitaly.WithUsername(a.GL_USERNAME),
gitaly.WithLoggingMetadata(a),
)
if err != nil {
return fmt.Errorf("get gitaly client: %w", err)

View File

@ -69,18 +69,6 @@ func InitializeSidechannelRegistry(logger *logrus.Logger) {
type MetadataFunc func(metadata.MD)
func WithUserID(userID string) MetadataFunc {
return func(md metadata.MD) {
md.Append("user_id", userID)
}
}
func WithUsername(username string) MetadataFunc {
return func(md metadata.MD) {
md.Append("username", username)
}
}
func WithFeatures(features map[string]string) MetadataFunc {
return func(md metadata.MD) {
for k, v := range features {
@ -92,6 +80,20 @@ func WithFeatures(features map[string]string) MetadataFunc {
}
}
func WithLoggingMetadata(r *api.Response) MetadataFunc {
return func(md metadata.MD) {
if r.GL_ID != "" {
md.Append("user_id", r.GL_ID)
}
if r.GL_USERNAME != "" {
md.Append("username", r.GL_USERNAME)
}
if r.RemoteIp != "" {
md.Append("remote_ip", r.RemoteIp)
}
}
}
func withOutgoingMetadata(ctx context.Context, addMetadataFuncs ...MetadataFunc) context.Context {
md := metadata.New(nil)

View File

@ -22,12 +22,16 @@ func TestNewSmartHTTPClient(t *testing.T) {
context.Background(),
serverFixture(),
WithFeatures(features()),
WithUsername("gl_username"),
WithUserID("gl_id"),
WithLoggingMetadata(&api.Response{
GL_USERNAME: "gl_username",
GL_ID: "gl_id",
RemoteIp: "1.2.3.4",
}),
)
require.NoError(t, err)
testOutgoingMetadata(t, ctx)
testOutgoingIDAndUsername(t, ctx)
testOutgoingRemoteIP(t, ctx)
require.NotNil(t, client.sidechannelRegistry)
}
@ -95,6 +99,13 @@ func testOutgoingIDAndUsername(t *testing.T, ctx context.Context) {
require.Equal(t, md["username"], []string{"gl_username"})
}
func testOutgoingRemoteIP(t *testing.T, ctx context.Context) {
md, ok := metadata.FromOutgoingContext(ctx)
require.True(t, ok, "get metadata from context")
require.Equal(t, md["remote_ip"], []string{"1.2.3.4"})
}
func features() map[string]string {
features := make(map[string]string)
for k, v := range allowedFeatures() {

View File

@ -813,6 +813,7 @@ func gitOkBody(t *testing.T) *api.Response {
return &api.Response{
GL_ID: "user-123",
GL_USERNAME: "username",
RemoteIp: "1.2.3.4",
Repository: gitalypb.Repository{
StorageName: "default",
RelativePath: "foo/bar.git",