Add latest changes from gitlab-org/gitlab@master

parent af018b61ed
commit da73b48d56

@@ -5,6 +5,9 @@ Gitlab/FeatureFlagKeyDynamic:
  Exclude:
    - 'app/graphql/resolvers/app_config/gitlab_instance_feature_flags_resolver.rb'
    - 'app/graphql/resolvers/feature_flag_resolver.rb'
    - 'app/models/concerns/ci/partitionable/switch.rb'
    - 'app/models/concerns/limitable.rb'
    - 'app/models/group.rb'
    - 'app/services/concerns/measurable.rb'
    - 'app/services/service_desk_settings/update_service.rb'
    - 'app/workers/concerns/worker_attributes.rb'
@@ -13,6 +16,7 @@ Gitlab/FeatureFlagKeyDynamic:
    - 'ee/app/graphql/resolvers/ai/user_available_features_resolver.rb'
    - 'ee/app/graphql/resolvers/ai/user_code_suggestions_contexts_resolver.rb'
    - 'ee/app/models/concerns/geo/verifiable_replicator.rb'
    - 'ee/app/models/members/member_role.rb'
    - 'ee/app/services/search/zoekt/info_service.rb'
    - 'ee/lib/gitlab/ai_gateway.rb'
    - 'ee/lib/gitlab/geo/replicator.rb'
@@ -20,12 +24,17 @@ Gitlab/FeatureFlagKeyDynamic:
    - 'ee/lib/gitlab/llm/completions_factory.rb'
    - 'ee/lib/tasks/gitlab/nav/variant_generator.rb'
    - 'ee/spec/graphql/resolvers/ai/user_available_features_resolver_spec.rb'
    - 'ee/spec/lib/gitlab/duo/developments/setup_spec.rb'
    - 'ee/spec/models/gitlab_subscriptions/features_spec.rb'
    - 'lib/feature/gitaly.rb'
    - 'lib/gitlab/ci/config/feature_flags.rb'
    - 'lib/gitlab/experiment_feature_rollout.rb'
    - 'lib/gitlab/gon_helper.rb'
    - 'lib/gitlab/metrics/methods.rb'
    - 'lib/gitlab/redis/multi_store.rb'
    - 'lib/gitlab/sidekiq_middleware/skip_jobs.rb'
    - 'lib/gitlab/sidekiq_sharding/router.rb'
    - 'lib/gitlab/tracking/event_eligibility_checker.rb'
    - 'lib/web_ide/extension_marketplace.rb'
    - 'spec/lib/feature_spec.rb'
    - 'spec/requests/api/features_spec.rb'

@@ -296,8 +296,6 @@ Gitlab/StrongMemoizeAttr:
    - 'ee/app/services/ee/projects/create_from_template_service.rb'
    - 'ee/app/services/ee/projects/gitlab_projects_import_service.rb'
    - 'ee/app/services/ee/protected_branches/create_service.rb'
    - 'ee/app/services/ee/search/global_service.rb'
    - 'ee/app/services/ee/search/group_service.rb'
    - 'ee/app/services/ee/users/authorized_build_service.rb'
    - 'ee/app/services/ee/users/build_service.rb'
    - 'ee/app/services/ee/users/update_service.rb'

@@ -43,7 +43,6 @@ Lint/SymbolConversion:
    - 'ee/lib/ee/api/helpers.rb'
    - 'ee/lib/gitlab/geo/replicator.rb'
    - 'ee/lib/gitlab/graphql/aggregations/epics/epic_node.rb'
    - 'ee/lib/search/zoekt/search_results.rb'
    - 'ee/spec/factories/ci/builds.rb'
    - 'ee/spec/factories/ci/pipelines.rb'
    - 'ee/spec/features/groups/analytics/cycle_analytics/charts_spec.rb'

@@ -13,7 +13,6 @@ RSpec/ReceiveMessages:
    - 'ee/spec/features/groups/iterations/user_edits_iteration_cadence_spec.rb'
    - 'ee/spec/features/projects/new_project_spec.rb'
    - 'ee/spec/features/projects/settings/ee/service_desk_setting_spec.rb'
    - 'ee/spec/features/search/elastic/project_search_spec.rb'
    - 'ee/spec/finders/security/security_policies_finder_spec.rb'
    - 'ee/spec/frontend/fixtures/epic.rb'
    - 'ee/spec/graphql/resolvers/epic_issues_resolver_spec.rb'

@@ -1 +1 @@
bde1da2a11f978f937589b277c2b5bd5711036ed
6a69fdde9d24d290cc33e48d7062a57da2f0c508

@@ -1 +1 @@
69d41543ba9e650df1415c0f96a6df6d1cb3c352
7e94489bc9d892e3cb25f9f9e7f4f7ce15ac0ee8

@@ -455,7 +455,7 @@
{"name":"omniauth-google-oauth2","version":"1.1.1","platform":"ruby","checksum":"4496f126e84eaf760f9c6a5c69e5e7511f98092d7f25ad79fd2c0ae5e09b5039"},
{"name":"omniauth-oauth2","version":"1.8.0","platform":"ruby","checksum":"b2f8e9559cc7e2d4efba57607691d6d2b634b879fc5b5b6ccfefa3da85089e78"},
{"name":"omniauth-oauth2-generic","version":"0.2.8","platform":"ruby","checksum":"ce6e8539019d5ebf2f48867072b9f248f148bb4cbe7166dee655865abfae7613"},
{"name":"omniauth-saml","version":"2.2.2","platform":"ruby","checksum":"84cc650ac2d684fcd430a5a14dfbfa0f8677eebaf0bd9d86ab17b520edd0fbb3"},
{"name":"omniauth-saml","version":"2.2.3","platform":"ruby","checksum":"d4e0dbdcb304e4bb74410eb75deaa1873b08a42afa7634c9c3171be0b34751b0"},
{"name":"omniauth-shibboleth-redux","version":"2.0.0","platform":"ruby","checksum":"e9b353fd103405fcc8549e8510b9cad857acf0b286d764fac5dba8a93ab8ffe1"},
{"name":"omniauth_openid_connect","version":"0.8.0","platform":"ruby","checksum":"1f2f3890386e2a742221cee0d2e903b78d874e6fab9ea3bfa31c1462f4793d25"},
{"name":"open4","version":"1.3.4","platform":"ruby","checksum":"a1df037310624ecc1ea1d81264b11c83e96d0c3c1c6043108d37d396dcd0f4b1"},

@@ -1281,9 +1281,9 @@ GEM
    omniauth-oauth2-generic (0.2.8)
      omniauth-oauth2 (~> 1.0)
      rake
    omniauth-saml (2.2.2)
    omniauth-saml (2.2.3)
      omniauth (~> 2.1)
      ruby-saml (~> 1.17)
      ruby-saml (~> 1.18)
    omniauth-shibboleth-redux (2.0.0)
      omniauth (>= 2.0.0)
    omniauth_openid_connect (0.8.0)

@@ -458,7 +458,7 @@
{"name":"omniauth-google-oauth2","version":"1.1.1","platform":"ruby","checksum":"4496f126e84eaf760f9c6a5c69e5e7511f98092d7f25ad79fd2c0ae5e09b5039"},
{"name":"omniauth-oauth2","version":"1.8.0","platform":"ruby","checksum":"b2f8e9559cc7e2d4efba57607691d6d2b634b879fc5b5b6ccfefa3da85089e78"},
{"name":"omniauth-oauth2-generic","version":"0.2.8","platform":"ruby","checksum":"ce6e8539019d5ebf2f48867072b9f248f148bb4cbe7166dee655865abfae7613"},
{"name":"omniauth-saml","version":"2.2.2","platform":"ruby","checksum":"84cc650ac2d684fcd430a5a14dfbfa0f8677eebaf0bd9d86ab17b520edd0fbb3"},
{"name":"omniauth-saml","version":"2.2.3","platform":"ruby","checksum":"d4e0dbdcb304e4bb74410eb75deaa1873b08a42afa7634c9c3171be0b34751b0"},
{"name":"omniauth-shibboleth-redux","version":"2.0.0","platform":"ruby","checksum":"e9b353fd103405fcc8549e8510b9cad857acf0b286d764fac5dba8a93ab8ffe1"},
{"name":"omniauth_openid_connect","version":"0.8.0","platform":"ruby","checksum":"1f2f3890386e2a742221cee0d2e903b78d874e6fab9ea3bfa31c1462f4793d25"},
{"name":"open4","version":"1.3.4","platform":"ruby","checksum":"a1df037310624ecc1ea1d81264b11c83e96d0c3c1c6043108d37d396dcd0f4b1"},
@@ -602,7 +602,7 @@
{"name":"regexp_parser","version":"2.10.0","platform":"ruby","checksum":"cb6f0ddde88772cd64bff1dbbf68df66d376043fe2e66a9ef77fcb1b0c548c61"},
{"name":"regexp_property_values","version":"1.0.0","platform":"java","checksum":"5e26782b01241616855c4ee7bb8a62fce9387e484f2d3eaf04f2a0633708222e"},
{"name":"regexp_property_values","version":"1.0.0","platform":"ruby","checksum":"162499dc0bba1e66d334273a059f207a61981cc8cc69d2ca743594e7886d080f"},
{"name":"reline","version":"0.6.0","platform":"ruby","checksum":"57620375dcbe56ec09bac7192bfb7460c716bbf0054dc94345ecaa5438e539d2"},
{"name":"reline","version":"0.6.1","platform":"ruby","checksum":"1afcc9d7cb1029cdbe780d72f2f09251ce46d3780050f3ec39c3ccc6b60675fb"},
{"name":"representable","version":"3.2.0","platform":"ruby","checksum":"cc29bf7eebc31653586849371a43ffe36c60b54b0a6365b5f7d95ec34d1ebace"},
{"name":"request_store","version":"1.7.0","platform":"ruby","checksum":"e1b75d5346a315f452242a68c937ef8e48b215b9453a77a6c0acdca2934c88cb"},
{"name":"responders","version":"3.0.1","platform":"ruby","checksum":"613fe28e498987f4feaa3230aa6313ca4bd5f0563a3da83511b0dd6cd8f47292"},

@@ -1298,9 +1298,9 @@ GEM
    omniauth-oauth2-generic (0.2.8)
      omniauth-oauth2 (~> 1.0)
      rake
    omniauth-saml (2.2.2)
    omniauth-saml (2.2.3)
      omniauth (~> 2.1)
      ruby-saml (~> 1.17)
      ruby-saml (~> 1.18)
    omniauth-shibboleth-redux (2.0.0)
      omniauth (>= 2.0.0)
    omniauth_openid_connect (0.8.0)
@@ -1624,7 +1624,7 @@ GEM
      redis (>= 4, < 6)
    regexp_parser (2.10.0)
    regexp_property_values (1.0.0)
    reline (0.6.0)
    reline (0.6.1)
      io-console (~> 0.5)
    representable (3.2.0)
      declarative (< 0.1.0)

@@ -89,7 +89,7 @@ export default {
<template>
  <div>
  <div class="gl-pt-4">
    <dl class="runner-details-grid-template gl-mb-0 gl-grid">
    <dl class="gl-mb-0 gl-grid gl-grid-cols-[auto_1fr]">
      <runner-detail :label="s__('Runners|Description')" :value="runner.description" />
      <runner-detail
        :label="s__('Runners|Last contact')"

@@ -0,0 +1,34 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import VueRouter from 'vue-router';
import createDefaultClient from '~/lib/graphql';
import ProjectRunnerShowApp from './project_runner_show_app.vue';

Vue.use(VueApollo);
Vue.use(VueRouter);

export const initProjectRunnerShow = (selector = '#js-project-runner-show') => {
  const el = document.querySelector(selector);

  if (!el) {
    return null;
  }

  const { runnerId } = el.dataset;

  const apolloProvider = new VueApollo({
    defaultClient: createDefaultClient(),
  });

  return new Vue({
    el,
    apolloProvider,
    render(h) {
      return h(ProjectRunnerShowApp, {
        props: {
          runnerId,
        },
      });
    },
  });
};

@@ -0,0 +1,57 @@
<script>
import { createAlert } from '~/alert';
import { TYPENAME_CI_RUNNER } from '~/graphql_shared/constants';
import { convertToGraphQLId } from '~/graphql_shared/utils';

import RunnerHeader from '../components/runner_header.vue';
import RunnerDetailsTabs from '../components/runner_details_tabs.vue';

import { I18N_FETCH_ERROR } from '../constants';
import runnerQuery from '../graphql/show/runner.query.graphql';
import { captureException } from '../sentry_utils';

export default {
  name: 'ProjectRunnerShowApp',
  components: {
    RunnerHeader,
    RunnerDetailsTabs,
  },
  props: {
    runnerId: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      runner: null,
    };
  },
  apollo: {
    runner: {
      query: runnerQuery,
      variables() {
        return {
          id: convertToGraphQLId(TYPENAME_CI_RUNNER, this.runnerId),
        };
      },
      error(error) {
        createAlert({ message: I18N_FETCH_ERROR });

        this.reportToSentry(error);
      },
    },
  },
  methods: {
    reportToSentry(error) {
      captureException({ error, component: this.$options.name });
    },
  },
};
</script>
<template>
  <div>
    <runner-header v-if="runner" :runner="runner" />
    <runner-details-tabs v-if="runner" :runner="runner" />
  </div>
</template>

@@ -201,7 +201,7 @@ export default (
  };

  const deployedBeforeToken = {
    formattedKey: __('Deployed-before'),
    formattedKey: __('Deployed before'),
    key: 'deployed-before',
    type: 'string',
    param: '',
@@ -211,7 +211,7 @@ export default (
  };

  const deployedAfterToken = {
    formattedKey: __('Deployed-after'),
    formattedKey: __('Deployed after'),
    key: 'deployed-after',
    type: 'string',
    param: '',

@@ -9,7 +9,7 @@ import { loadingIconForLegacyJS } from '~/loading_icon_for_legacy_js';
import { s__, __, sprintf } from '~/locale';
import { isUserBusy } from '~/set_status_modal/utils';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import { linkedItems } from '~/graphql_shared/issuable_client';
import { currentAssignees, linkedItems } from '~/graphql_shared/issuable_client';
import { state } from '~/sidebar/components/reviewers/sidebar_reviewers.vue';
import {
  ISSUABLE_EPIC,
@@ -182,6 +182,18 @@ export const defaultAutocompleteConfig = {

class GfmAutoComplete {
  constructor(dataSources = {}) {
    // Ensure that all possible work item paths are included
    const { page } = document.body.dataset;
    this.isWorkItemsView =
      (gon.current_user_use_work_items_view || gon.features?.workItemViewForIssues) &&
      (page.includes('groups:work_items') ||
        page.includes('projects:work_items') ||
        page.includes('groups:issues') ||
        page.includes('projects:issues') ||
        page.includes('groups:epics') ||
        page.includes('issues:show') ||
        page.includes('epics:show'));

    this.dataSources = dataSources;
    this.cachedData = {};
    this.isLoadingData = {};
@@ -454,8 +466,13 @@ class GfmAutoComplete {
        });

        // Cache assignees & reviewers list for easier filtering later
        assignees =
          SidebarMediator.singleton?.store?.assignees?.map(createMemberSearchString) || [];
        if (instance.isWorkItemsView) {
          const { workItemId } = this.$inputor.get(0).closest('.js-gfm-wrapper').dataset;
          assignees = (currentAssignees()[`${workItemId}`] || []).map(createMemberSearchString);
        } else {
          assignees =
            SidebarMediator.singleton?.store?.assignees?.map(createMemberSearchString) || [];
        }
        reviewers = state.issuable?.reviewers?.nodes?.map(createMemberSearchString) || [];

        const match = GfmAutoComplete.defaultMatcher(flag, subtext, this.app.controllers);
@@ -562,14 +579,10 @@ class GfmAutoComplete {
        return match && match.length ? match[1] : null;
      },
      filter(query, data) {
        // Limit enhanced /unlink to only Work Items for now.
        const hasWorkItemIssuesEnabled =
          gon.current_user_use_work_items_view || gon.features.workItemViewForIssues;
        if (hasWorkItemIssuesEnabled && command === MEMBER_COMMAND.UNLINK) {
        if (instance.isWorkItemsView && command === MEMBER_COMMAND.UNLINK) {
          const { workItemFullPath, workItemIid } = this.$inputor
            .get(0)
            .closest('section')
            .querySelector('#linkeditems').dataset;
            .closest('.js-gfm-wrapper').dataset;

          // Only include items which are linked to the Issuable currently
          // if `#` is followed by `/unlink` command.

@@ -13,6 +13,7 @@ import {
  WIDGET_TYPE_AWARD_EMOJI,
  WIDGET_TYPE_HIERARCHY,
  WIDGET_TYPE_LINKED_ITEMS,
  WIDGET_TYPE_ASSIGNEES,
} from '~/work_items/constants';

import isExpandedHierarchyTreeChildQuery from '~/work_items/graphql/client/is_expanded_hierarchy_tree_child.query.graphql';
@@ -22,6 +23,7 @@ import { updateNewWorkItemCache, workItemBulkEdit } from '~/work_items/graphql/r
import { preserveDetailsState } from '~/work_items/utils';

export const linkedItems = makeVar({});
export const currentAssignees = makeVar({});

export const config = {
  typeDefs,
@@ -200,10 +202,10 @@ export const config = {
            return existingWidget;
          }

          const incomindNodes = incomingWidget.linkedItems?.nodes || [];
          const incomingNodes = incomingWidget.linkedItems?.nodes || [];
          const existingNodes = existingWidget.linkedItems?.nodes || [];

          const resultNodes = incomindNodes.map((incomingNode) => {
          const resultNodes = incomingNodes.map((incomingNode) => {
            const existingNode =
              existingNodes.find((n) => n.linkId === incomingNode.linkId) ?? {};
            return { ...existingNode, ...incomingNode };
@@ -234,6 +236,20 @@ export const config = {
            };
          }

          if (existingWidget?.type === WIDGET_TYPE_ASSIGNEES && context.variables.id) {
            const workItemAssignees = existingWidget.assignees?.nodes || [];
            const users = workItemAssignees.map(
              // eslint-disable-next-line no-underscore-dangle
              (user) => context.cache.extract()[user.__ref],
            );

            const existingAssignees = currentAssignees();
            currentAssignees({
              ...existingAssignees,
              [`${context.variables.id}`]: users,
            });
          }

          return { ...existingWidget, ...incomingWidget };
        });
      },

@@ -66,7 +66,7 @@ export const initGitlabWebIDE = async (el) => {
  try {
    // See ClientOnlyConfig https://gitlab.com/gitlab-org/gitlab-web-ide/-/blob/main/packages/web-ide-types/src/config.ts#L17
    await start(rootEl, {
      ...getBaseConfig(),
      ...(await getBaseConfig()),
      nonce,
      httpHeaders,
      auth: oauthConfig,

@@ -1,5 +1,6 @@
import * as packageJSON from '@gitlab/web-ide/package.json';
import { cleanEndingSeparator, joinPaths } from '~/lib/utils/url_utility';
import { sha256 } from '~/lib/utils/text_utility';
import { isMultiDomainEnabled } from './is_multi_domain_enabled';

const getGitLabUrl = (gitlabPath = '') => {
@@ -13,10 +14,13 @@ const getGitLabUrl = (gitlabPath = '') => {
 * Generates a base64 string based on the GitLab instance origin and the current username.
 * @returns {string}
 */
export const generateWorkbenchSubdomain = () =>
  btoa(`${window.location.origin}-${window.gon.current_username}`).replace(/\W+/g, '');
export const generateWorkbenchSubdomain = async () => {
  const digest = await sha256(`${window.location.origin}-${window.gon.current_username}`);

const getWorkbenchUrlsMultiDomain = () => {
  return digest.substring(0, 30);
};

const getWorkbenchUrlsMultiDomain = async () => {
  const workbenchVersion = packageJSON.version;

  return {
@@ -24,7 +28,7 @@ const getWorkbenchUrlsMultiDomain = () => {
     * URL pointing to the origin and base path where the
     * Web IDE's workbench assets are hosted.
     */
    workbenchBaseUrl: `https://workbench-${generateWorkbenchSubdomain()}.cdn.web-ide.gitlab-static.net/gitlab-web-ide-vscode-workbench-${workbenchVersion}`,
    workbenchBaseUrl: `https://workbench-${await generateWorkbenchSubdomain()}.cdn.web-ide.gitlab-static.net/gitlab-web-ide-vscode-workbench-${workbenchVersion}`,

    /**
     * URL pointing to the origin and the base path where
@@ -52,7 +56,7 @@ const getWorkbenchUrlsSingleDomain = () => ({
const getWorkbenchUrls = () =>
  isMultiDomainEnabled() ? getWorkbenchUrlsMultiDomain() : getWorkbenchUrlsSingleDomain();

export const getBaseConfig = () => ({
export const getBaseConfig = async () => ({
  /**
   * URL pointing to the system embedding the Web IDE. Most of the
   * time, but not necessarily, is a GitLab instance.
@@ -65,5 +69,5 @@ export const getBaseConfig = () => ({
   */
  gitlabUrl: getGitLabUrl(''),

  ...getWorkbenchUrls(),
  ...(await getWorkbenchUrls()),
});

@@ -10,7 +10,7 @@ export const mountOAuthCallback = async () => {

  try {
    await oauthCallback({
      ...getBaseConfig(),
      ...(await getBaseConfig()),
      username: gon.current_username,
      auth: getOAuthConfig(el.dataset),
    });

@@ -1,3 +1,5 @@
import { showAlertFromLocalStorage } from '~/ci/runner/local_storage_alert/show_alert_from_local_storage';
import { initProjectRunnerShow } from '~/ci/runner/project_runner_show';

showAlertFromLocalStorage();
initProjectRunnerShow();

@@ -27,10 +27,14 @@ export default () => {

  if ($projectName || $projectPath) {
    // get the project name from the URL and set it as input value
    $projectName.value = name;
    if (name) {
      $projectName.value = name;
    }

    // get the path url and append it in the input
    $projectPath.value = path;
    if (path) {
      $projectPath.value = path;
    }

    // generate slug when project name changes
    $projectName.addEventListener('keyup', () => {

@@ -239,8 +239,8 @@ const bindEvents = () => {

  bindHowToImport();

  $('.btn_import_gitlab_project').on('click contextmenu', () => {
    const importGitlabProjectBtn = document.querySelector('.btn_import_gitlab_project');
  $('.js-import-gitlab-project-btn').on('click contextmenu', () => {
    const importGitlabProjectBtn = document.querySelector('.js-import-gitlab-project-btn');
    const projectNamespaceId = document.querySelector('#project_namespace_id');

    const { href: importHref } = importGitlabProjectBtn.dataset;

@@ -24,10 +24,10 @@ export const useDiffsList = defineStore('diffsList', {
  actions: {
    withDebouncedAbortController: debounce(
      async function run(action) {
        const previousController = this.loadingController;
        this.loadingController?.abort?.();
        this.loadingController = new AbortController();
        try {
          await action(this.loadingController, previousController);
          await action(this.loadingController);
        } catch (error) {
          if (error.name !== 'AbortError') {
            this.status = statuses.error;
@@ -41,6 +41,7 @@ export const useDiffsList = defineStore('diffsList', {
      { leading: true },
    ),
    addLoadedFile({ target }) {
      if (this.status === statuses.fetching) return;
      this.loadedFiles = { ...this.loadedFiles, [target.id]: true };
    },
    fillInLoadedFiles() {
@@ -58,7 +59,7 @@ export const useDiffsList = defineStore('diffsList', {
      this.status = statuses.idle;
    },
    streamRemainingDiffs(url) {
      return this.withDebouncedAbortController(async ({ signal }, previousController) => {
      return this.withDebouncedAbortController(async ({ signal }) => {
        this.status = statuses.fetching;
        let request;
        let streamSignal = signal;
@@ -71,7 +72,6 @@ export const useDiffsList = defineStore('diffsList', {
          request = fetch(url, { signal });
        }
        const { body } = await request;
        if (previousController) previousController.abort();
        await this.renderDiffsStream(
          toPolyfillReadable(body),
          document.querySelector('#js-stream-container'),
@@ -90,14 +90,14 @@ export const useDiffsList = defineStore('diffsList', {
      });
    },
    reloadDiffs(url) {
      return this.withDebouncedAbortController(async ({ signal }, previousController) => {
      // TODO: handle loading state
      return this.withDebouncedAbortController(async ({ signal }) => {
        const container = document.querySelector('[data-diffs-list]');
        container.dataset.loading = 'true';
        this.loadedFiles = {};
        this.status = statuses.fetching;
        const { body } = await fetch(url, { signal });
        if (previousController) previousController.abort();
        this.loadedFiles = {};
        const container = document.querySelector('[data-diffs-list]');
        container.innerHTML = '';
        delete container.dataset.loading;
        await this.renderDiffsStream(toPolyfillReadable(body), container, signal);
      });
    },

@@ -127,7 +127,11 @@ export default {
    @hidden="isDropdownOpen = false"
  >
    <template #toggle>
      <button class="gl-rounded-pill gl-border-none gl-bg-transparent gl-p-0 gl-leading-0">
      <button
        class="gl-rounded-pill gl-border-none gl-bg-transparent gl-p-0 gl-leading-0"
        data-event-tracking="click_dropdown_showing_recent_mrs_for_file_on_branch"
        :data-event-value="openMrsCount"
      >
        <gl-badge
          v-gl-tooltip
          data-testid="open-mr-badge"

@@ -61,8 +61,8 @@ export const TOKEN_EMPTY_SEARCH_TERM = {
  },
};

export const TOKEN_TITLE_APPROVED_BY = __('Approved-By');
export const TOKEN_TITLE_MERGE_USER = __('Merged-By');
export const TOKEN_TITLE_APPROVED_BY = __('Approved by');
export const TOKEN_TITLE_MERGE_USER = __('Merged by');
export const TOKEN_TITLE_APPROVER = __('Approver');
export const TOKEN_TITLE_ASSIGNEE = s__('SearchToken|Assignee');
export const TOKEN_TITLE_AUTHOR = __('Author');
@@ -90,8 +90,8 @@ export const TOKEN_TITLE_VERSION = __('Version');
export const TOKEN_TITLE_SEARCH_WITHIN = __('Search within');
export const TOKEN_TITLE_CREATED = __('Created date');
export const TOKEN_TITLE_CLOSED = __('Closed date');
export const TOKEN_TITLE_DEPLOYED_BEFORE = __('Deployed-before');
export const TOKEN_TITLE_DEPLOYED_AFTER = __('Deployed-after');
export const TOKEN_TITLE_DEPLOYED_BEFORE = __('Deployed before');
export const TOKEN_TITLE_DEPLOYED_AFTER = __('Deployed after');
export const TOKEN_TITLE_ASSIGNED_SEAT = __('Assigned seat');
export const TOKEN_TITLE_ENVIRONMENT = __('Environment');
export const TOKEN_TITLE_STATE = __('State');

@@ -371,6 +371,7 @@ export default {
      :noteable-type="workItemTypeKey"
    >
      <markdown-editor
        class="js-gfm-wrapper"
        :value="commentText"
        :render-markdown-path="markdownPreviewPath"
        :markdown-docs-path="$options.constantOptions.markdownDocsPath"
@@ -378,6 +379,9 @@ export default {
        :autocomplete-data-sources="autocompleteDataSources"
        :form-field-props="formFieldProps"
        :uploads-path="uploadsPath"
        :data-work-item-full-path="fullPath"
        :data-work-item-id="workItemId"
        :data-work-item-iid="workItemIid"
        use-bottom-toolbar
        supports-quick-actions
        :autofocus="autofocus"

@@ -328,8 +328,6 @@ export default {
    :anchor-id="widgetName"
    :title="$options.i18n.title"
    :is-loading="isLoading"
    :data-work-item-full-path="workItemFullPath"
    :data-work-item-iid="workItemIid"
    is-collapsible
    persist-collapsed-state
    data-testid="work-item-relationships"

@@ -31,6 +31,15 @@
  flex: 1 0;
}

.rd-app-diffs-list {
  transition: opacity 0.2s;
}

.rd-app-diffs-list[data-loading] {
  opacity: 0.5;
  pointer-events: none;
}

.rd-app-sidebar {
  top: var(--rd-app-sticky-top-with-padding) !important;
  width: 320px;

@@ -1,3 +0,0 @@
.runner-details-grid-template {
  grid-template-columns: auto 1fr;
}

@@ -28,7 +28,7 @@
    = render RapidDiffs::EmptyStateComponent.new

.code{ class: helpers.user_color_scheme }
  %div{ data: { diffs_list: true } }
  .rd-app-diffs-list{ data: { diffs_list: true } }
    = javascript_tag nonce: content_security_policy_nonce do
      :plain
        requestAnimationFrame(() => { window.performance.mark('rapid-diffs-first-diff-file-shown') })

@@ -1,6 +1,6 @@
-# TODO: add fork suggestion (commits only)

%diff-file.rd-diff-file-component{ id: id, data: server_data }
%diff-file.rd-diff-file-component{ id: id, data: { testid: 'rd-diff-file', **server_data } }
  .rd-diff-file
    = render RapidDiffs::DiffFileHeaderComponent.new(diff_file: @diff_file)
    -# extra wrapper needed so content-visibility: hidden doesn't require removing border or other styles

@@ -3,7 +3,7 @@
module EventForward
  class EventForwardController < BaseActionController
    def forward
      if ::Feature.enabled?('collect_product_usage_events', :instance)
      if ::Feature.enabled?(:collect_product_usage_events, :instance)
        process_events

        head :ok

@@ -1,5 +1,4 @@
- add_page_specific_style 'page_bundles/ci_status'
- add_page_specific_style 'page_bundles/runner_details'

- runner_name = runner_short_name(@runner)
- breadcrumb_title runner_name

@@ -1,5 +1,4 @@
- add_page_specific_style 'page_bundles/ci_status'
- add_page_specific_style 'page_bundles/runner_details'

- runner_name = runner_short_name(@runner)
- breadcrumb_title runner_name

@@ -17,7 +17,7 @@
.import-buttons
  - if gitlab_project_import_enabled?
    .import_gitlab_project.has-tooltip{ data: { container: 'body', testid: 'gitlab-import-button' } }
      = render Pajamas::ButtonComponent.new(href: '#', icon: 'tanuki', button_options: { class: 'btn_import_gitlab_project js-import-project-btn', data: { href: new_import_gitlab_project_path, platform: 'gitlab_export', **tracking_attrs_data('import_project', 'click_button', 'gitlab_export') } }) do
      = render Pajamas::ButtonComponent.new(href: '#', icon: 'tanuki', button_options: { class: 'js-import-gitlab-project-btn js-import-project-btn', data: { href: new_import_gitlab_project_path, platform: 'gitlab_export', **tracking_attrs_data('import_project', 'click_button', 'gitlab_export') } }) do
        = _('GitLab export')

  - if github_import_enabled?

@@ -1,6 +1,7 @@
- runner_name = runner_short_name(@runner)
- project_runners_path = project_settings_ci_cd_path(@project)
- breadcrumb_title runner_name
- page_title runner_name
- add_to_breadcrumbs _('CI/CD Settings'), project_settings_ci_cd_path(@project)
- add_to_breadcrumbs _('CI/CD Settings'), project_runners_path

= render 'shared/runners/runner_details', runner: @runner
#js-project-runner-show{ data: { runner_id: @runner.id } }

@@ -1,59 +0,0 @@
%h1.page-title.gl-text-size-h-display.gl-flex.gl-items-center
  = s_('Runners|Runner #%{runner_id}') % { runner_id: runner.id }
  = render 'shared/runners/runner_type_badge', runner: runner

-# Only shows details for one manager
- runner_manager = runner.runner_managers.order_contacted_at_desc.first

.table-holder
  %table.table
    %thead
      %tr
        %th= s_('Runners|Property Name')
        %th= s_('Runners|Value')
    %tr
      %td= s_('Runners|Description')
      %td= runner.description
    %tr
      %td= s_('Runners|Paused')
      %td= runner.active? ? _('No') : _('Yes')
    %tr
      %td= s_('Runners|Protected')
      %td= runner.ref_protected? ? _('Yes') : _('No')
    %tr
      %td= s_('Runners|Can run untagged jobs')
      %td= runner.run_untagged? ? _('Yes') : _('No')
    - unless runner.group_type?
      %tr
        %td= s_('Runners|Locked to this project')
        %td= runner.locked? ? _('Yes') : _('No')
    %tr
      %td= s_('Runners|Tags')
      %td
        - runner.tag_list.sort.each do |tag|
          = gl_badge_tag tag, variant: :info
    %tr
      %td= s_('Runners|Maximum job timeout')
      %td= runner.maximum_timeout_human_readable
    %tr
      %td= s_('Runners|Last contact')
      %td
        - if runner.contacted_at
          = time_ago_with_tooltip runner.contacted_at
        - else
          = _('Never')
    %tr
      %td= s_('Runners|Version')
      %td= runner_manager&.version
    %tr
      %td= s_('Runners|IP Address')
      %td= runner_manager&.ip_address
    %tr
      %td= s_('Runners|Revision')
      %td= runner_manager&.revision
    %tr
      %td= s_('Runners|Platform')
      %td= runner_manager&.platform
    %tr
      %td= s_('Runners|Architecture')
      %td= runner_manager&.architecture

@@ -1,7 +0,0 @@
.gl-ml-2
  - if runner.instance_type?
    = gl_badge_tag s_('Runners|shared'), variant: :success
  - elsif runner.group_type?
    = gl_badge_tag s_('Runners|group'), variant: :success
  - else
    = gl_badge_tag s_('Runners|project'), variant: :info

@@ -358,7 +358,6 @@ module Gitlab
    config.assets.precompile << "page_bundles/reports.css"
    config.assets.precompile << "page_bundles/requirements.css"
    config.assets.precompile << "page_bundles/roadmap.css"
    config.assets.precompile << "page_bundles/runner_details.css"
    config.assets.precompile << "page_bundles/runners.css"
    config.assets.precompile << "page_bundles/search.css"
    config.assets.precompile << "page_bundles/security_dashboard.css"

@@ -0,0 +1,20 @@
---
description: Opened dropdown of recent MRs modifying this file and targeting the current branch
internal_events: true
action: click_dropdown_showing_recent_mrs_for_file_on_branch
identifiers:
- project
- namespace
- user
additional_properties:
  value:
    description: Number of matching MRs returned in the dropdown
product_group: source_code
product_categories:
- source_code_management
milestone: '17.11'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/187097
tiers:
- free
- premium
- ultimate

@@ -1,9 +0,0 @@
---
name: use_primary_and_secondary_stores_for_buffered_counter
feature_issue_url: https://gitlab.com/gitlab-com/gl-infra/data-access/durability/team/-/issues/71
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182086
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/520435
milestone: '17.10'
group: group::durability
type: gitlab_com_derisk
default_enabled: false

@@ -1,9 +0,0 @@
---
name: use_primary_store_as_default_for_buffered_counter
feature_issue_url: https://gitlab.com/gitlab-com/gl-infra/data-access/durability/team/-/issues/71
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182086
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/520436
milestone: '17.10'
group: group::durability
type: gitlab_com_derisk
default_enabled: false

@@ -18,7 +18,7 @@ end
if Gitlab.config.cell.enabled
  print_error.call("Cell ID is not set to a valid positive integer.") if Gitlab.config.cell.id.to_i < 1

  Settings.topology_service_settings.each do |setting|
  Settings.required_topology_service_settings.each do |setting|
    setting_value = Gitlab.config.cell.topology_service_client.send(setting)
    print_error.call("Topology Service setting '#{setting}' is not set.") if setting_value.blank?
  end

@@ -0,0 +1,24 @@
---
key_path: redis_hll_counters.count_distinct_user_id_from_click_dropdown_showing_recent_mrs_for_file_on_branch
description: >-
  Count of unique users who opened the dropdown to view merge requests created in the past 30 days that target the current branch and modify the selected file.
product_group: source_code
product_categories:
- source_code_management
performance_indicator_type: []
value_type: number
status: active
milestone: '17.11'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/187097
time_frame:
- 28d
- 7d
data_source: internal_events
data_category: optional
tiers:
- free
- premium
- ultimate
events:
- name: click_dropdown_showing_recent_mrs_for_file_on_branch
  unique: user.id

@@ -0,0 +1,24 @@
---
key_path: counts.count_total_click_dropdown_showing_recent_mrs_for_file_on_branch
description: >-
  Total count of clicks to open the dropdown to view merge requests created in the past 30 days that target the current branch and modify the selected file
product_group: source_code
product_categories:
- source_code_management
performance_indicator_type: []
value_type: number
status: active
milestone: '17.11'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/187097
time_frame:
- 28d
- 7d
- all
data_source: internal_events
data_category: optional
tiers:
- free
- premium
- ultimate
events:
- name: click_dropdown_showing_recent_mrs_for_file_on_branch

@@ -1,7 +1,5 @@
---
development:
  buffered_counter:
    url: redis://localhost:6379
  chat:
    cluster:
      - redis://localhost:7001
@@ -30,8 +28,6 @@ development:
    url: redis://localhost:6379

test:
  buffered_counter:
    url: redis://localhost:6379
  chat:
    cluster:
      - redis://localhost:7001

@@ -197,8 +197,8 @@ Settings = GitlabSettings.load(file, Rails.env) do
      [[Gitlab::SidekiqConfig::WorkerMatcher::WILDCARD_MATCH, 'default']]
    end

    def topology_service_settings
      %w[address ca_file certificate_file private_key_file]
    def required_topology_service_settings
      %w[address]
    end

    private

@@ -1,6 +1,6 @@
- title: "ZenTao integration" # (required) Clearly explain the change, or planned change. For example, "The `confidential` field for a `Note` is deprecated" or "The maximum number of characters in a job name will be limited to 250."
  announcement_milestone: "15.7" # (required) The milestone when this feature was first announced as deprecated.
  removal_milestone: "18.0" # (required) The milestone when this feature is planned to be removed
  removal_milestone: "19.0" # (required) The milestone when this feature is planned to be removed
  breaking_change: true # (required) If this deprecation is a breaking change, set this value to true
  reporter: arturoherrero # (required) GitLab username of the person reporting the deprecation
  stage: Foundations # (required) String value of the stage that the feature was created in. e.g., Growth

@@ -5,4 +5,4 @@ feature_category: observability
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/174508
milestone: '17.7'
queued_migration_version: 20241203081756
finalized_by: # version of the migration that finalized this BBM
finalized_by: '20250407231859'

@@ -9,14 +9,6 @@ description: Stores information about connections between external issue tracker
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/48465
milestone: '13.7'
gitlab_schema: gitlab_sec
desired_sharding_key:
  project_id:
    references: projects
    backfill_via:
      parent:
        foreign_key: vulnerability_id
        table: vulnerabilities
        sharding_key: project_id
        belongs_to: vulnerability
desired_sharding_key_migration_job_name: BackfillVulnerabilityExternalIssueLinksProjectId
table_size: small
sharding_key:
  project_id: projects

@@ -0,0 +1,14 @@
# frozen_string_literal: true

class AddPackagesNpmMetadataProjectIdNotNull < Gitlab::Database::Migration[2.2]
  milestone '17.11'
  disable_ddl_transaction!

  def up
    add_not_null_constraint :packages_npm_metadata, :project_id, validate: false
  end

  def down
    remove_not_null_constraint :packages_npm_metadata, :project_id
  end
end

@@ -0,0 +1,16 @@
# frozen_string_literal: true

class PreparePackagesNpmMetadataProjectIdNotNullValidation < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!
  milestone '17.11'

  CONSTRAINT_NAME = :check_8d2e047947

  def up
    prepare_async_check_constraint_validation :packages_npm_metadata, name: CONSTRAINT_NAME
  end

  def down
    unprepare_async_check_constraint_validation :packages_npm_metadata, name: CONSTRAINT_NAME
  end
end

@@ -0,0 +1,14 @@
# frozen_string_literal: true

class AddVulnerabilityExternalIssueLinksProjectIdNotNull < Gitlab::Database::Migration[2.2]
  milestone '17.11'
  disable_ddl_transaction!

  def up
    add_not_null_constraint :vulnerability_external_issue_links, :project_id
  end

  def down
    remove_not_null_constraint :vulnerability_external_issue_links, :project_id
  end
end

@@ -0,0 +1,21 @@
# frozen_string_literal: true

class FinalizeHkBackfillIssuableMetricImagesNamespaceId < Gitlab::Database::Migration[2.2]
  milestone '17.11'

  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell

  def up
    ensure_batched_background_migration_is_finished(
      job_class_name: 'BackfillIssuableMetricImagesNamespaceId',
      table_name: :issuable_metric_images,
      column_name: :id,
      job_arguments: [:namespace_id, :issues, :namespace_id, :issue_id],
      finalize: true
    )
  end

  def down; end
end

@@ -0,0 +1 @@
b01847072276c7499a86b35bad7b6324eceba9daafafae75e39e528c1836195f

@@ -0,0 +1 @@
42428a8dab28ab5410be5a93bd79bfcc0e28b3763b7e0026f3ee78d4c4a604c9

@@ -0,0 +1 @@
70ef76809f89752d5e524e7b1a1be1ec4f5b41c0268b81cd1c721ecf72028c19

@@ -0,0 +1 @@
ced4698752a0c5101127b213c19eb4c8d058373a86e018caba3cf8019a2b530b

@@ -24677,7 +24677,8 @@ CREATE TABLE vulnerability_external_issue_links (
    external_issue_key text NOT NULL,
    project_id bigint,
    CONSTRAINT check_3200604f5e CHECK ((char_length(external_issue_key) <= 255)),
    CONSTRAINT check_68cffd19b0 CHECK ((char_length(external_project_key) <= 255))
    CONSTRAINT check_68cffd19b0 CHECK ((char_length(external_project_key) <= 255)),
    CONSTRAINT check_9bbcf5afdd CHECK ((project_id IS NOT NULL))
);

CREATE SEQUENCE vulnerability_external_issue_links_id_seq
@@ -28871,6 +28872,9 @@ ALTER TABLE description_versions
ALTER TABLE ONLY group_type_ci_runners
    ADD CONSTRAINT check_81b90172a6 UNIQUE (id);

ALTER TABLE packages_npm_metadata
    ADD CONSTRAINT check_8d2e047947 CHECK ((project_id IS NOT NULL)) NOT VALID;

ALTER TABLE sprints
    ADD CONSTRAINT check_ccd8a1eae0 CHECK ((start_date IS NOT NULL)) NOT VALID;

@@ -116,11 +116,11 @@ The cells related configuration in `config/gitlab.yml` is in this format:
|----------------------------------------------------|-------------------------------------------------------|-------------|
| `cell.enabled` | `false` | To configure whether the instance is a Cell or not. `false` means all Cell features are disabled. `session_cookie_prefix_token` is not affected, and can be set separately. |
| `cell.id` | `nil` | Required to be a positive integer when `cell.enabled` is `true`. Otherwise, it must be `nil`. This is the unique integer identifier for the cell in a cluster. This ID is used inside the routable tokens. When `cell.id` is `nil`, the other attributes inside the routable tokens, like `organization_id` will still be used |
| `cell.database.skip_sequence_alteration` | `false` | When `true`, skips database sequence alteration for the cell. Enable for the legacy cell (`cell-1`) before the monolith cell is available for use, being tracked in this epic: [Phase 6: Monolith Cell](https://gitlab.com/groups/gitlab-org/-/epics/14513). |
| `cell.topology_service_client.address` | `"topology-service.gitlab.example.com:443"` | Required when `cell.enabled` is `true`. Address and port of the topology service server. |
| `cell.topology_service_client.ca_file` | `"/home/git/gitlab/config/topology-service-ca.pem"` | Required when `cell.enabled` is `true`. Path to the CA certificate file for secure communication. |
| `cell.topology_service_client.certificate_file` | `"/home/git/gitlab/config/topology-service-cert.pem"` | Required when `cell.enabled` is `true`. Path to the client certificate file. |
| `cell.topology_service_client.private_key_file` | `"/home/git/gitlab/config/topology-service-key.pem"` | Required when `cell.enabled` is `true`. Path to the private key file. |
| `cell.database.skip_sequence_alteration` | `false` | When `true`, skips database sequence alteration for the cell. Enable for the legacy cell (`cell-1`) before the monolith cell is available for use, being tracked in this epic: [Phase 6: Monolith Cell](https://gitlab.com/groups/gitlab-org/-/epics/14513). |
| `cell.topology_service_client.address` | `"topology-service.gitlab.example.com:443"` | Required when `cell.enabled` is `true`. Address and port of the topology service server. |
| `cell.topology_service_client.ca_file` | `"/home/git/gitlab/config/topology-service-ca.pem"` | Path to the CA certificate file for secure communication. This is not used at the moment. |
| `cell.topology_service_client.certificate_file` | `"/home/git/gitlab/config/topology-service-cert.pem"` | Path to the client certificate file. This is not used at the moment. |
| `cell.topology_service_client.private_key_file` | `"/home/git/gitlab/config/topology-service-key.pem"` | Path to the private key file. This is not used at the moment. |

## Related configuration

@@ -934,7 +934,7 @@ The following are some available Rake tasks:
| [`sudo gitlab-rake gitlab:elastic:clear_index_status`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Deletes all instances of IndexStatus for all projects. This command results in a complete wipe of the index, and it should be used with caution. |
| [`sudo gitlab-rake gitlab:elastic:create_empty_index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Generates empty indices (the default index and a separate issues index) and assigns an alias for each on the Elasticsearch side only if it doesn't already exist. |
| [`sudo gitlab-rake gitlab:elastic:delete_index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Removes the GitLab indices and aliases (if they exist) on the Elasticsearch instance. |
| [`sudo gitlab-rake gitlab:elastic:recreate_index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index` and `gitlab:elastic:create_empty_index`. |
| [`sudo gitlab-rake gitlab:elastic:recreate_index`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Wrapper task for `gitlab:elastic:delete_index` and `gitlab:elastic:create_empty_index`. Does not queue any indexing jobs. |
| [`sudo gitlab-rake gitlab:elastic:index_snippets`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Performs an Elasticsearch import that indexes the snippets data. |
| [`sudo gitlab-rake gitlab:elastic:index_users`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Imports all users into Elasticsearch. |
| [`sudo gitlab-rake gitlab:elastic:projects_not_indexed`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/tasks/gitlab/elastic.rake) | Displays which projects do not have repository data indexed. This task does not include non-repository data such as issues, merge requests, or milestones. |

@@ -95,7 +95,7 @@ See [Elasticsearch Index Scopes](../../advanced_search/elasticsearch.md#advanced

## No search results after switching Elasticsearch servers

To reindex the database, repositories, and wikis, run all Rake tasks again.
To reindex the database, repositories, and wikis, [index the instance](../../advanced_search/elasticsearch.md#index-the-instance).

## Indexing fails with `error: elastic: Error 429 (Too Many Requests)`

@@ -73,7 +73,6 @@ This window takes place on May 5 - 7, 2025 from 09:00 UTC to 22:00 UTC.
|-------------|--------|-------|-------|------------------------|
| [GitLab.com certificate-based integration with Kubernetes](https://gitlab.com/groups/gitlab-org/configure/-/epics/8) | Medium | Deploy | | |
| [Runner `active` GraphQL fields replaced by `paused`](https://gitlab.com/gitlab-org/gitlab/-/issues/351109) | Low | Verify | Instance, group, project | |
| [ZenTao integration](https://gitlab.com/gitlab-org/gitlab/-/issues/377825) | Low | Foundations | Instance | |
| [GraphQL deprecation of `dependencyProxyTotalSizeInBytes` field](https://gitlab.com/gitlab-org/gitlab/-/issues/414236) | Low | Package | Group | |
| [The `ci_job_token_scope_enabled` projects API attribute is deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/423091) | Low | Govern | Project | |
| [Deprecate license metadata format V1](https://gitlab.com/gitlab-org/gitlab/-/issues/438477) | Low | Secure | Instance | |

@@ -633,6 +633,23 @@ The `Project.services` GraphQL field is deprecated. A `Project.integrations` fie

<div class="deprecation breaking-change" data-milestone="19.0">

### ZenTao integration

<div class="deprecation-notes">

- Announced in GitLab <span class="milestone">15.7</span>
- Removal in GitLab <span class="milestone">19.0</span> ([breaking change](https://docs.gitlab.com/update/terminology/#breaking-change))
- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/377825).

</div>

The [ZenTao product integration](https://docs.gitlab.com/user/project/integrations/zentao/) has been deprecated
and will be moved to the JiHu GitLab codebase.

</div>

<div class="deprecation breaking-change" data-milestone="19.0">

### `scanResultPolicies` GraphQL field is deprecated

<div class="deprecation-notes">

@@ -2274,23 +2291,6 @@ To prepare for this change:

<div class="deprecation breaking-change" data-milestone="18.0">

### ZenTao integration

<div class="deprecation-notes">

- Announced in GitLab <span class="milestone">15.7</span>
- Removal in GitLab <span class="milestone">18.0</span> ([breaking change](https://docs.gitlab.com/update/terminology/#breaking-change))
- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/377825).

</div>

The [ZenTao product integration](https://docs.gitlab.com/user/project/integrations/zentao/) has been deprecated
and will be moved to the JiHu GitLab codebase.

</div>

<div class="deprecation breaking-change" data-milestone="18.0">

### `RemoteDevelopmentAgentConfig` GraphQL type is deprecated

<div class="deprecation-notes">

@@ -115,18 +115,6 @@ Amazon Q can make code changes based on reviewer feedback.

Amazon Q proposes changes to the merge request based on the reviewer's comments and feedback.

### View suggested fixes

After Amazon Q has reviewed your code and added comments that explain potential issues,
Amazon Q can reply to these comments with suggested fixes.

1. Open a merge request that has feedback from Amazon Q.
1. On the **Overview** tab, go to the comment you want to address.
1. Type `/q fix`.
1. Select **Add comment now**.

Amazon Q proposes fixes for the issue in the comment.

### Generate unit tests

Generate new unit tests while you're having your merge request reviewed.

@@ -127,9 +127,9 @@ To view the open merge requests for a file:

{{< history >}}

- Filtering by `source-branch` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/134555) in GitLab 16.6.
- Filtering by `merged-by` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/140002) in GitLab 16.9. Available only when the feature flag `mr_merge_user_filter` is enabled.
- Filtering by `merged-by` [generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142666) in GitLab 17.0. Feature flag `mr_merge_user_filter` removed.
- Filtering by `source branch` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/134555) in GitLab 16.6.
- Filtering by `merged by` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/140002) in GitLab 16.9. Available only when the feature flag `mr_merge_user_filter` is enabled.
- Filtering by `merged by` [generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142666) in GitLab 17.0. Feature flag `mr_merge_user_filter` removed.

{{< /history >}}

@@ -142,10 +142,10 @@ To filter the list of merge requests:
   - [**By environment or deployment date**](#by-environment-or-deployment-date).
   - **ID**: Enter filter `#30` to return only merge request 30.
   - User filters: Type (or select from the dropdown list) any of these filters to display a list of users:
     - **Approved-By**, for merge requests already approved by a user. Premium and Ultimate only.
     - **Approved by**, for merge requests already approved by a user. Premium and Ultimate only.
     - **Approver**, for merge requests that this user is eligible to approve.
       (For more information, read about [Code owners](../codeowners/_index.md)). Premium and Ultimate only.
     - **Merged-By**, for merge requests merged by this user.
     - **Merged by**, for merge requests merged by this user.
     - **Reviewer**, for merge requests reviewed by this user.
1. Select or type the operator to use for filtering the attribute. The following operators are
   available:

@@ -164,8 +164,8 @@ To filter merge requests by deployment data, such as the environment or a date,
you can type (or select from the dropdown list) the following:

- Environment
- Deployed-before
- Deployed-after
- Deployed before
- Deployed after

{{< alert type="note" >}}

@@ -177,7 +177,7 @@ do not return results, as this method does not create a merge commit.
When filtering by an environment, a dropdown list presents all environments that
you can choose from.

When filtering by `Deployed-before` or `Deployed-after`:
When filtering by `Deployed before` or `Deployed after`:

- The date refers to when the deployment to an environment (triggered by the
  merge commit) completed successfully.

@ -217,6 +217,7 @@ To add a new collection:
|
|||
1. Implement the `self.queue` class method to return the associated queue
|
||||
1. Implement the `self.reference_klass` or `self.reference_klasses` class method to return the references for an object
|
||||
1. Implement the `self.routing(object)` class method to determine how an object should be routed
|
||||
1. Implement the `self.ids_to_objects(ids)` class method to convert ids into objects for redaction.
|
||||
|
||||
Example:
|
||||
|
||||
|
|
@ -244,6 +245,10 @@ module Ai
|
|||
def self.routing(object)
|
||||
object.project.root_ancestor.id
|
||||
end
|
||||
|
||||
def self.ids_to_objects(ids)
|
||||
::MergeRequest.id_in(ids)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@@ -362,7 +367,7 @@ end
```ruby
query = ActiveContext::Query.filter(project_id: 1).limit(1)

results = ActiveContext.adapter.search(collection: "gitlab_active_context_code_embeddings", query: query)
results = Ai::Context::Collections::MergeRequest.search(user: current_user, query: query)

results.to_a
```
@@ -374,7 +379,7 @@ target_embedding = ::ActiveContext::Embeddings.generate_embeddings("some text")

query = ActiveContext::Query.filter(project_id: 1).knn(target: "embeddings", vector: target_embedding, limit: 1)

result = ActiveContext.adapter.search(collection: "gitlab_active_context_code_embeddings", query: query)
results = Ai::Context::Collections::MergeRequest.search(user: current_user, query: query)

result.to_a
results.to_a
```
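Because `search` now receives the user and runs the hits through `redact_unauthorized_results!` (see the adapter and concern changes below), the value returned to callers is an array of model records the user is permitted to read, rather than raw hits. A short usage sketch, assuming the `MergeRequest` collection above and an acting `current_user`:

```ruby
query = ActiveContext::Query.filter(project_id: 1).limit(10)

# Already redacted: only merge requests `current_user` can read remain.
results = Ai::Context::Collections::MergeRequest.search(user: current_user, query: query)

results.each { |merge_request| puts merge_request.title }
```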
@@ -10,6 +10,10 @@ module ActiveContext
        ActiveContext::Tracker.track!(objects, collection: self)
      end

      def search(user:, query:)
        ActiveContext.adapter.search(query: query, user: user, collection: self)
      end

      def collection_name
        raise NotImplementedError
      end
@@ -22,6 +26,10 @@ module ActiveContext
        raise NotImplementedError
      end

      def ids_to_objects(_)
        raise NotImplementedError
      end

      def reference_klasses
        Array.wrap(reference_klass).tap do |klasses|
          raise NotImplementedError, "#{self} should define reference_klasses or reference_klass" if klasses.empty?
@@ -35,6 +43,25 @@ module ActiveContext
      def collection_record
        ActiveContext::CollectionCache.fetch(collection_name)
      end

      def redact_unauthorized_results!(result)
        objects = ids_to_objects(result.ids)
        id_to_object_map = objects.index_by { |object| object.id.to_s }

        authorized_ids = Set.new(objects.select do |object|
          authorized_to_see_object?(result.user, object)
        end.map(&:id).map(&:to_s))

        result.ids
          .select { |id| authorized_ids.include?(id.to_s) }
          .map { |id| id_to_object_map[id.to_s] }
      end

      def authorized_to_see_object?(user, object)
        return true unless object.respond_to?(:to_ability_name) && DeclarativePolicy.has_policy?(object)

        Ability.allowed?(user, :"read_#{object.to_ability_name}", object)
      end
    end

    attr_reader :object
@@ -6,9 +6,29 @@ module ActiveContext
      module QueryResult
        include Enumerable

        attr_reader :user

        def initialize(result:, collection:, user:)
          @result = result
          @collection = collection
          @user = user
        end

        def authorized_results
          @authorized_results ||= collection.redact_unauthorized_results!(self)
        end

        def ids
          each.pluck('ref_id')
        end

        def each
          raise NotImplementedError
        end

        private

        attr_reader :result, :collection
      end
    end
  end
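For orientation, a hedged sketch of what a database adapter now has to supply on top of this concern: `initialize`, `ids`, and `authorized_results` come from the concern, so a result class mainly implements `each` (and usually `count`). The in-memory adapter below is hypothetical and only illustrates the contract.

```ruby
module ActiveContext
  module Databases
    module InMemory
      # Hypothetical adapter result used for illustration. Assumes the raw
      # result is an array of hashes containing a 'ref_id' key, which is the
      # field Concerns::QueryResult#ids plucks.
      class QueryResult
        include ActiveContext::Databases::Concerns::QueryResult

        def count
          result.size
        end

        def each
          return enum_for(:each) unless block_given?

          result.each { |row| yield row }
        end
      end
    end
  end
end
```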
@@ -15,12 +15,11 @@ module ActiveContext
        @options = options
      end

      def search(collection:, query:)
        raise ArgumentError, "Expected Query object, you used #{query.class}" unless query.is_a?(ActiveContext::Query)

      def search(user:, collection:, query:)
        es_query = Processor.transform(collection, query)
        res = client.search(index: collection, body: es_query)
        QueryResult.new(res)
        result = client.search(index: collection.collection_name, body: es_query)

        QueryResult.new(result: result, collection: collection, user: user).authorized_results
      end

      def client
@@ -6,10 +6,6 @@ module ActiveContext
      class QueryResult
        include ActiveContext::Databases::Concerns::QueryResult

        def initialize(result)
          @result = result
        end

        def count
          result['hits']['total']['value']
        end
@@ -21,10 +17,6 @@ module ActiveContext
            yield hit['_source']
          end
        end

        private

        attr_reader :result
      end
    end
  end
@@ -18,12 +18,11 @@ module ActiveContext
        @options = options
      end

      def search(collection:, query:)
        raise ArgumentError, "Expected Query object, you used #{query.class}" unless query.is_a?(ActiveContext::Query)

      def search(user:, collection:, query:)
        es_query = Processor.transform(collection, query)
        res = client.search(index: collection, body: es_query)
        QueryResult.new(res)
        result = client.search(index: collection.collection_name, body: es_query)

        QueryResult.new(result: result, collection: collection, user: user).authorized_results
      end

      def client
@@ -6,10 +6,6 @@ module ActiveContext
      class QueryResult
        include ActiveContext::Databases::Concerns::QueryResult

        def initialize(result)
          @result = result
        end

        def count
          result['hits']['total']['value']
        end
@@ -21,10 +17,6 @@ module ActiveContext
            yield hit['_source']
          end
        end

        private

        attr_reader :result
      end
    end
  end
@@ -21,14 +21,14 @@ module ActiveContext
        setup_connection_pool
      end

      def search(collection:, query:)
        raise ArgumentError, "Expected Query object, you used #{query.class}" unless query.is_a?(ActiveContext::Query)
      def search(user:, collection:, query:)
        sql = Processor.transform(collection.collection_name, query)

        sql = Processor.transform(collection, query)
        res = with_connection do |conn|
        result = with_connection do |conn|
          conn.execute(sql)
        end
        QueryResult.new(res)

        QueryResult.new(result: result, collection: collection, user: user).authorized_results
      end

      def bulk_process(operations)
@@ -6,29 +6,21 @@ module ActiveContext
      class QueryResult
        include ActiveContext::Databases::Concerns::QueryResult

        def initialize(pg_result)
          @pg_result = pg_result
        end

        def each
          return enum_for(:each) unless block_given?

          pg_result.each do |row|
          result.each do |row|
            yield row
          end
        end

        def count
          pg_result.ntuples
          result.ntuples
        end

        def clear
          pg_result.clear if pg_result.respond_to?(:clear)
          result.clear if result.respond_to?(:clear)
        end

        private

        attr_reader :pg_result
      end
    end
  end
@ -0,0 +1,162 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
require 'ostruct'
|
||||
|
||||
RSpec.describe ActiveContext::Concerns::Collection do
|
||||
let(:collection_class) { Test::Collections::Mock }
|
||||
let(:mock_object) { double(id: 123) }
|
||||
let(:collection_record) { double(id: 456) }
|
||||
let(:reference_instance) { instance_double(Test::References::Mock) }
|
||||
let(:mock_adapter) { double }
|
||||
let(:search_results) { double(ids: %w[1 2 3], user: user) }
|
||||
let(:user) { double }
|
||||
|
||||
before do
|
||||
allow(ActiveContext::CollectionCache).to receive(:fetch)
|
||||
.with(collection_class.collection_name)
|
||||
.and_return(collection_record)
|
||||
allow(ActiveContext).to receive(:adapter).and_return(mock_adapter)
|
||||
end
|
||||
|
||||
describe '.track!' do
|
||||
it 'delegates to ActiveContext::Tracker' do
|
||||
objects = [mock_object]
|
||||
|
||||
expect(ActiveContext::Tracker).to receive(:track!).with(objects, collection: collection_class)
|
||||
|
||||
collection_class.track!(*objects)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.search' do
|
||||
it 'delegates to ActiveContext adapter' do
|
||||
query = 'test query'
|
||||
|
||||
expect(mock_adapter).to receive(:search).with(query: query, user: user, collection: collection_class)
|
||||
|
||||
collection_class.search(user: user, query: query)
|
||||
end
|
||||
end
|
||||
|
||||
describe '.collection_record' do
|
||||
it 'fetches from CollectionCache' do
|
||||
expect(ActiveContext::CollectionCache).to receive(:fetch).with(collection_class.collection_name)
|
||||
|
||||
collection_class.collection_record
|
||||
end
|
||||
end
|
||||
|
||||
describe '.redact_unauthorized_results!' do
|
||||
let(:object1) { double(id: '1') }
|
||||
let(:object2) { double(id: '2') }
|
||||
let(:object3) { double(id: '3') }
|
||||
let(:ids) { %w[2 3 1] }
|
||||
let(:objects) { [object1, object2, object3] }
|
||||
let(:search_results) { double(ids: ids, user: user) }
|
||||
|
||||
before do
|
||||
allow(collection_class).to receive(:ids_to_objects).with(ids).and_return(objects)
|
||||
end
|
||||
|
||||
it 'preserves the order of IDs in the authorized results' do
|
||||
allow(collection_class).to receive(:authorized_to_see_object?).with(user, object1).and_return(true)
|
||||
allow(collection_class).to receive(:authorized_to_see_object?).with(user, object2).and_return(true)
|
||||
allow(collection_class).to receive(:authorized_to_see_object?).with(user, object3).and_return(false)
|
||||
|
||||
result = collection_class.redact_unauthorized_results!(search_results)
|
||||
|
||||
expect(result).to eq([object2, object1])
|
||||
end
|
||||
|
||||
it 'filters out unauthorized results' do
|
||||
allow(collection_class).to receive(:authorized_to_see_object?).with(user, object1).and_return(false)
|
||||
allow(collection_class).to receive(:authorized_to_see_object?).with(user, object2).and_return(true)
|
||||
allow(collection_class).to receive(:authorized_to_see_object?).with(user, object3).and_return(false)
|
||||
|
||||
result = collection_class.redact_unauthorized_results!(search_results)
|
||||
|
||||
expect(result).to eq([object2])
|
||||
end
|
||||
end
|
||||
|
||||
describe '#references' do
|
||||
let(:collection_instance) { collection_class.new(mock_object) }
|
||||
|
||||
before do
|
||||
allow(collection_class).to receive(:routing).with(mock_object).and_return(123)
|
||||
allow(Test::References::Mock).to receive(:serialize).with(collection_id: 456, routing: 123,
|
||||
data: mock_object).and_return(reference_instance)
|
||||
end
|
||||
|
||||
it 'creates references for the object' do
|
||||
expect(collection_instance.references).to eq([reference_instance])
|
||||
end
|
||||
|
||||
context 'with multiple reference classes' do
|
||||
let(:reference_instance2) { instance_double(Test::References::Mock) }
|
||||
let(:reference_class2) { class_double(Test::References::Mock) }
|
||||
|
||||
before do
|
||||
allow(collection_class).to receive(:reference_klasses).and_return([Test::References::Mock, reference_class2])
|
||||
allow(reference_class2).to receive(:serialize).with(collection_id: 456, routing: 123,
|
||||
data: mock_object).and_return(reference_instance2)
|
||||
end
|
||||
|
||||
it 'creates references for each reference class' do
|
||||
expect(collection_instance.references).to eq([reference_instance, reference_instance2])
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '.reference_klasses' do
|
||||
context 'when reference_klass is defined' do
|
||||
it 'returns an array with the reference_klass' do
|
||||
expect(collection_class.reference_klasses).to eq([Test::References::Mock])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when reference_klass is not defined' do
|
||||
let(:invalid_collection_class) do
|
||||
Class.new do
|
||||
include ActiveContext::Concerns::Collection
|
||||
|
||||
def self.reference_klass
|
||||
nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it 'raises NotImplementedError' do
|
||||
expect do
|
||||
invalid_collection_class.reference_klasses
|
||||
end.to raise_error(NotImplementedError,
|
||||
/should define reference_klasses or reference_klass/)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'required interface methods' do
|
||||
let(:base_collection_class) do
|
||||
Class.new do
|
||||
include ActiveContext::Concerns::Collection
|
||||
end
|
||||
end
|
||||
|
||||
it 'requires collection_name to be implemented' do
|
||||
expect { base_collection_class.collection_name }.to raise_error(NotImplementedError)
|
||||
end
|
||||
|
||||
it 'requires queue to be implemented' do
|
||||
expect { base_collection_class.queue }.to raise_error(NotImplementedError)
|
||||
end
|
||||
|
||||
it 'requires routing to be implemented' do
|
||||
expect { base_collection_class.routing(nil) }.to raise_error(NotImplementedError)
|
||||
end
|
||||
|
||||
it 'requires ids_to_objects to be implemented' do
|
||||
expect { base_collection_class.ids_to_objects(nil) }.to raise_error(NotImplementedError)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -2,6 +2,8 @@
|
|||
|
||||
RSpec.describe ActiveContext::Databases::Elasticsearch::Client do
|
||||
let(:options) { { url: 'http://localhost:9200' } }
|
||||
let(:user) { double }
|
||||
let(:collection) { double }
|
||||
|
||||
subject(:client) { described_class.new(options) }
|
||||
|
||||
|
|
@ -13,16 +15,12 @@ RSpec.describe ActiveContext::Databases::Elasticsearch::Client do
|
|||
before do
|
||||
allow(client).to receive(:client).and_return(elasticsearch_client)
|
||||
allow(elasticsearch_client).to receive(:search).and_return(search_response)
|
||||
allow(collection).to receive_messages(collection_name: 'test', redact_unauthorized_results!: [[], []])
|
||||
end
|
||||
|
||||
it 'calls search on the Elasticsearch client' do
|
||||
expect(elasticsearch_client).to receive(:search)
|
||||
client.search(collection: 'test', query: query)
|
||||
end
|
||||
|
||||
it 'returns a QueryResult object' do
|
||||
result = client.search(collection: 'test', query: query)
|
||||
expect(result).to be_a(ActiveContext::Databases::Elasticsearch::QueryResult)
|
||||
client.search(collection: collection, query: query, user: user)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec.describe ActiveContext::Databases::Elasticsearch::QueryResult do
|
||||
let(:collection) { double(:collection) }
|
||||
let(:user) { double(:user) }
|
||||
let(:elasticsearch_result) do
|
||||
{
|
||||
'hits' => {
|
||||
|
|
@ -13,7 +15,11 @@ RSpec.describe ActiveContext::Databases::Elasticsearch::QueryResult do
|
|||
}
|
||||
end
|
||||
|
||||
subject(:query_result) { described_class.new(elasticsearch_result) }
|
||||
subject(:query_result) { described_class.new(result: elasticsearch_result, collection: collection, user: user) }
|
||||
|
||||
before do
|
||||
allow(collection).to receive_messages(redact_unauthorized_results!: [[], []])
|
||||
end
|
||||
|
||||
describe '#count' do
|
||||
it 'returns the total number of hits' do
|
||||
|
|
@ -42,4 +48,23 @@ RSpec.describe ActiveContext::Databases::Elasticsearch::QueryResult do
|
|||
expect(query_result.select { |hit| hit['id'] == 1 }).to eq([{ 'id' => 1, 'name' => 'test1' }])
|
||||
end
|
||||
end
|
||||
|
||||
describe '#authorized_results' do
|
||||
let(:authorized_records) { [{ 'id' => 1, 'name' => 'test1' }] }
|
||||
|
||||
before do
|
||||
allow(collection).to receive(:redact_unauthorized_results!).with(query_result).and_return(authorized_records)
|
||||
end
|
||||
|
||||
it 'delegates to collection.redact_unauthorized_results!' do
|
||||
expect(query_result.authorized_results).to eq(authorized_records)
|
||||
expect(collection).to have_received(:redact_unauthorized_results!).with(query_result)
|
||||
end
|
||||
|
||||
it 'memoizes the result' do
|
||||
2.times { query_result.authorized_results }
|
||||
|
||||
expect(collection).to have_received(:redact_unauthorized_results!).with(query_result).once
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -2,6 +2,8 @@
|
|||
|
||||
RSpec.describe ActiveContext::Databases::Opensearch::Client do
|
||||
let(:options) { { url: 'http://localhost:9200' } }
|
||||
let(:user) { double }
|
||||
let(:collection) { double }
|
||||
|
||||
subject(:client) { described_class.new(options) }
|
||||
|
||||
|
|
@ -13,16 +15,12 @@ RSpec.describe ActiveContext::Databases::Opensearch::Client do
|
|||
before do
|
||||
allow(client).to receive(:client).and_return(opensearch_client)
|
||||
allow(opensearch_client).to receive(:search).and_return(search_response)
|
||||
allow(collection).to receive_messages(collection_name: 'test', redact_unauthorized_results!: [[], []])
|
||||
end
|
||||
|
||||
it 'calls search on the Opensearch client' do
|
||||
expect(opensearch_client).to receive(:search)
|
||||
client.search(collection: 'test', query: query)
|
||||
end
|
||||
|
||||
it 'returns a QueryResult object' do
|
||||
result = client.search(collection: 'test', query: query)
|
||||
expect(result).to be_a(ActiveContext::Databases::Opensearch::QueryResult)
|
||||
client.search(collection: collection, query: query, user: user)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec.describe ActiveContext::Databases::Opensearch::QueryResult do
|
||||
let(:collection) { double(:collection) }
|
||||
let(:user) { double(:user) }
|
||||
let(:elasticsearch_result) do
|
||||
{
|
||||
'hits' => {
|
||||
|
|
@ -13,7 +15,11 @@ RSpec.describe ActiveContext::Databases::Opensearch::QueryResult do
|
|||
}
|
||||
end
|
||||
|
||||
subject(:query_result) { described_class.new(elasticsearch_result) }
|
||||
subject(:query_result) { described_class.new(result: elasticsearch_result, collection: collection, user: user) }
|
||||
|
||||
before do
|
||||
allow(collection).to receive_messages(redact_unauthorized_results!: [[], []])
|
||||
end
|
||||
|
||||
describe '#count' do
|
||||
it 'returns the total number of hits' do
|
||||
|
|
|
|||
|
|
@ -215,6 +215,8 @@ RSpec.describe ActiveContext::Databases::Postgresql::Client do
|
|||
let(:connection_pool) { instance_double(ActiveRecord::ConnectionAdapters::ConnectionPool) }
|
||||
let(:ar_connection) { instance_double(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter) }
|
||||
let(:connection_model) { class_double(ActiveRecord::Base) }
|
||||
let(:collection) { double }
|
||||
let(:user) { double }
|
||||
|
||||
before do
|
||||
allow_any_instance_of(described_class).to receive(:create_connection_model)
|
||||
|
|
@ -231,6 +233,7 @@ RSpec.describe ActiveContext::Databases::Postgresql::Client do
|
|||
|
||||
allow(raw_connection).to receive(:server_version).and_return(120000)
|
||||
allow(ActiveContext::Databases::Postgresql::QueryResult).to receive(:new)
|
||||
allow(collection).to receive_messages(collection_name: 'test', redact_unauthorized_results!: [[], []])
|
||||
|
||||
allow(ActiveContext::Databases::Postgresql::Processor).to receive(:transform)
|
||||
.and_return('SELECT * FROM pg_stat_activity')
|
||||
|
|
@ -239,9 +242,9 @@ RSpec.describe ActiveContext::Databases::Postgresql::Client do
|
|||
it 'executes query and returns QueryResult' do
|
||||
expect(ar_connection).to receive(:execute).with('SELECT * FROM pg_stat_activity')
|
||||
expect(ActiveContext::Databases::Postgresql::QueryResult)
|
||||
.to receive(:new).with(query_result)
|
||||
.to receive(:new).with(result: query_result, collection: collection, user: user).and_call_original
|
||||
|
||||
client.search(collection: 'test', query: ActiveContext::Query.filter(project_id: 1))
|
||||
client.search(collection: collection, query: ActiveContext::Query.filter(project_id: 1), user: user)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -1,9 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
RSpec.describe ActiveContext::Databases::Postgresql::QueryResult do
|
||||
let(:collection) { double(:collection) }
|
||||
let(:user) { double(:user) }
|
||||
let(:pg_result) { instance_double(PG::Result) }
|
||||
|
||||
subject(:query_result) { described_class.new(pg_result) }
|
||||
subject(:query_result) { described_class.new(result: pg_result, collection: collection, user: user) }
|
||||
|
||||
before do
|
||||
allow(collection).to receive_messages(redact_unauthorized_results!: [[], []])
|
||||
end
|
||||
|
||||
describe '#each' do
|
||||
it 'yields each row' do
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,25 @@
# frozen_string_literal: true

module Test
  module Collections
    class Mock
      include ::ActiveContext::Concerns::Collection

      def self.collection_name
        'test_mock_collection'
      end

      def self.queue
        'test_queue'
      end

      def self.reference_klass
        Test::References::Mock
      end

      def self.routing(object)
        object.id
      end
    end
  end
end
@@ -252,7 +252,7 @@ module Gitlab
    end

    def redis_state(&block)
      Gitlab::Redis::BufferedCounter.with(&block)
      Gitlab::Redis::SharedState.with(&block)
    end

    def with_exclusive_lease(&block)
@@ -8,7 +8,6 @@ module Gitlab
    # This will make sure the connection pool is initialized on application boot in
    # config/initializers/7_redis.rb, instrumented, and used in health- & readiness checks.
    ALL_CLASSES = [
      Gitlab::Redis::BufferedCounter,
      Gitlab::Redis::Cache,
      Gitlab::Redis::DbLoadBalancing,
      Gitlab::Redis::FeatureFlag,
@@ -1,17 +0,0 @@
# frozen_string_literal: true

module Gitlab
  module Redis
    class BufferedCounter < ::Gitlab::Redis::MultiStoreWrapper
      class << self
        def config_fallback
          SharedState
        end

        def multistore
          MultiStore.create_using_pool(SharedState.pool, pool, store_name)
        end
      end
    end
  end
end
@ -8126,6 +8126,9 @@ msgstr ""
|
|||
msgid "Approved"
|
||||
msgstr ""
|
||||
|
||||
msgid "Approved by"
|
||||
msgstr ""
|
||||
|
||||
msgid "Approved by others"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -8145,9 +8148,6 @@ msgstr[1] ""
|
|||
msgid "Approved the current merge request."
|
||||
msgstr ""
|
||||
|
||||
msgid "Approved-By"
|
||||
msgstr ""
|
||||
|
||||
msgid "Approver"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -21028,15 +21028,15 @@ msgstr ""
|
|||
msgid "DeployTokens|Your new project deploy token has been created."
|
||||
msgstr ""
|
||||
|
||||
msgid "Deployed after"
|
||||
msgstr ""
|
||||
|
||||
msgid "Deployed before"
|
||||
msgstr ""
|
||||
|
||||
msgid "Deployed to"
|
||||
msgstr ""
|
||||
|
||||
msgid "Deployed-after"
|
||||
msgstr ""
|
||||
|
||||
msgid "Deployed-before"
|
||||
msgstr ""
|
||||
|
||||
msgid "Deploying to"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -37474,9 +37474,6 @@ msgstr ""
|
|||
msgid "Merged this merge request."
|
||||
msgstr ""
|
||||
|
||||
msgid "Merged-By"
|
||||
msgstr ""
|
||||
|
||||
msgid "Merged: %{merged}"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -50957,9 +50954,6 @@ msgstr ""
|
|||
msgid "Runners|Before you begin"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Can run untagged jobs"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Capacity of 1 enables warm HA through Auto Scaling group re-spawn. Capacity of 2 enables hot HA because the service is available even when a node is lost. Capacity of 3 or more enables hot HA and manual scaling of runner fleet."
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -51226,9 +51220,6 @@ msgstr ""
|
|||
msgid "Runners|Learn more in the %{linkStart}Google Cloud documentation%{linkEnd}."
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Locked to this project"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Machine type"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -51428,9 +51419,6 @@ msgstr ""
|
|||
msgid "Runners|Project › CI/CD Settings › Runners"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Property Name"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Protected"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -51491,18 +51479,12 @@ msgstr ""
|
|||
msgid "Runners|Resume accepting jobs"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Revision"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Run the following command to enable the required services and create a service account with the required permissions. Only do this once for each Google Cloud project. You might be prompted to sign in to Google."
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Runner"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Runner #%{runner_id}"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Runner %{name} was deleted"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -51822,9 +51804,6 @@ msgstr ""
|
|||
msgid "Runners|Use the runner on pipelines for protected branches only."
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Value"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|Version"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -51888,15 +51867,6 @@ msgstr ""
|
|||
msgid "Runners|Zone must have the right format."
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|group"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|project"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runners|shared"
|
||||
msgstr ""
|
||||
|
||||
msgid "Runner|1 day selected"
|
||||
msgid_plural "Runner|%d days selected"
|
||||
msgstr[0] ""
|
||||
|
|
|
|||
|
|
@@ -24,33 +24,34 @@ module RuboCop
      class FeatureFlagKeyDynamic < RuboCop::Cop::Base
        extend AutoCorrector

        MSG = 'First argument to `Feature.%<method>s` must be a literal symbol.'
        MSG = 'First argument to `%<module>s.%<method>s` must be a literal symbol.'

        FEATURE_METHODS = %i[enabled? disabled?].freeze
        RESTRICT_ON_SEND = %i[enabled? disabled?].to_set.freeze

        # @!method feature_flag_method?(node)
        def_node_matcher :feature_flag_method?, <<~PATTERN
          (send
            (const nil? :Feature)
            ${:enabled? :disabled?}
            ${(const {nil? cbase} :Feature)}
            ${RESTRICT_ON_SEND}
            $_
            ...
          )
        PATTERN

        # rubocop:disable InternalAffairs/OnSendWithoutOnCSend -- `Feature&.enabled?` is not possible
        def on_send(node)
          method_name = feature_flag_method?(node)
          module_node, method_name, first_arg = feature_flag_method?(node)
          return unless method_name

          first_arg = node.first_argument
          return if first_arg&.sym_type?
          return if first_arg.sym_type?

          message = format(MSG, method: method_name)
          message = format(MSG, module: module_node.source, method: method_name)

          add_offense(first_arg, message: message) do |corrector|
            autocorrect(corrector, first_arg) if first_arg&.str_type?
            autocorrect(corrector, first_arg)
          end
        end
        alias_method :on_csend, :on_send
        # rubocop:enable InternalAffairs/OnSendWithoutOnCSend

        private

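As an illustration of the updated cop (a sketch based only on the matcher and message above, not part of the change set): both `Feature` and `::Feature` receivers are matched, any non-symbol key is flagged, and string keys can be autocorrected to symbols.

```ruby
Feature.enabled?(:my_flag, project)      # no offense: literal symbol key
Feature.enabled?('my_flag', project)     # offense; autocorrectable to :my_flag
::Feature.disabled?(flag_name, project)  # offense: key is not a literal symbol
```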
@ -0,0 +1,24 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe 'User views diffs', :js, feature_category: :code_review_workflow do
|
||||
let(:merge_request) do
|
||||
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
|
||||
end
|
||||
|
||||
let_it_be(:project) { create(:project, :public, :repository) }
|
||||
let(:view) { 'inline' }
|
||||
let(:last_commit_text) { 'Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9' }
|
||||
|
||||
before do
|
||||
stub_feature_flags(rapid_diffs: true)
|
||||
visit(diffs_project_merge_request_path(project, merge_request, view: view, rapid_diffs: true))
|
||||
|
||||
wait_for_requests
|
||||
end
|
||||
|
||||
it 'shows the last diff file' do
|
||||
expect(page).to have_selector('[data-testid="rd-diff-file"]', text: last_commit_text)
|
||||
end
|
||||
end
|
||||
|
|
@ -38,9 +38,9 @@ RSpec.describe 'Merge Requests > User filters', :js, feature_category: :code_rev
|
|||
end
|
||||
end
|
||||
|
||||
context 'filtering by approved-by:none' do
|
||||
context 'filtering by approved by:none' do
|
||||
it 'applies the filter' do
|
||||
select_tokens 'Approved-By', '=', 'None', submit: true
|
||||
select_tokens 'Approved by', '=', 'None', submit: true
|
||||
|
||||
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
|
||||
|
||||
|
|
@ -50,9 +50,9 @@ RSpec.describe 'Merge Requests > User filters', :js, feature_category: :code_rev
|
|||
end
|
||||
end
|
||||
|
||||
context 'filtering by approved-by:any' do
|
||||
context 'filtering by approved by:any' do
|
||||
it 'applies the filter' do
|
||||
select_tokens 'Approved-By', '=', 'Any', submit: true
|
||||
select_tokens 'Approved by', '=', 'Any', submit: true
|
||||
|
||||
expect(page).to have_issuable_counts(open: 2, closed: 0, all: 2)
|
||||
|
||||
|
|
@ -61,9 +61,9 @@ RSpec.describe 'Merge Requests > User filters', :js, feature_category: :code_rev
|
|||
end
|
||||
end
|
||||
|
||||
context 'filtering by approved-by:@username' do
|
||||
context 'filtering by approved by:@username' do
|
||||
it 'applies the filter' do
|
||||
select_tokens 'Approved-By', '=', first_user.username, submit: true
|
||||
select_tokens 'Approved by', '=', first_user.username, submit: true
|
||||
|
||||
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
|
||||
|
||||
|
|
@ -74,7 +74,7 @@ RSpec.describe 'Merge Requests > User filters', :js, feature_category: :code_rev
|
|||
|
||||
context 'filtering by an approver from a group' do
|
||||
it 'applies the filter' do
|
||||
select_tokens 'Approved-By', '=', group_user.username, submit: true
|
||||
select_tokens 'Approved by', '=', group_user.username, submit: true
|
||||
|
||||
expect(page).to have_issuable_counts(open: 1, closed: 0, all: 1)
|
||||
|
||||
|
|
|
|||
|
|
@ -64,9 +64,9 @@ RSpec.describe 'Merge Requests > User filters by deployments', :js, feature_cate
|
|||
visit(project_merge_requests_path(project, state: :merged))
|
||||
end
|
||||
|
||||
describe 'filtering by deployed-before' do
|
||||
describe 'filtering by deployed before' do
|
||||
it 'applies the filter' do
|
||||
select_tokens 'Deployed-before'
|
||||
select_tokens 'Deployed before'
|
||||
find_by_testid('filtered-search-token-segment-input').send_keys '2020-10-02'
|
||||
|
||||
send_keys :enter
|
||||
|
|
@ -76,9 +76,9 @@ RSpec.describe 'Merge Requests > User filters by deployments', :js, feature_cate
|
|||
end
|
||||
end
|
||||
|
||||
describe 'filtering by deployed-after' do
|
||||
describe 'filtering by deployed after' do
|
||||
it 'applies the filter' do
|
||||
select_tokens 'Deployed-after'
|
||||
select_tokens 'Deployed after'
|
||||
find_by_testid('filtered-search-token-segment-input').send_keys '2020-10-01'
|
||||
|
||||
send_keys :enter
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ RSpec.describe 'Maintainer manages project runners', feature_category: :fleet_vi
|
|||
create(:ci_runner_machine, runner: project_runner, platform: 'darwin')
|
||||
end
|
||||
|
||||
it 'user sees the project runner' do
|
||||
it 'user sees the project runner', :js do
|
||||
visit project_runners_path(project)
|
||||
|
||||
within_testid 'assigned_project_runners' do
|
||||
|
|
@ -30,7 +30,9 @@ RSpec.describe 'Maintainer manages project runners', feature_category: :fleet_vi
|
|||
|
||||
click_on project_runner.short_sha
|
||||
|
||||
expect(page).to have_content(project_runner_manager.platform)
|
||||
wait_for_requests
|
||||
|
||||
expect(page).to have_content(project_runner.description)
|
||||
end
|
||||
|
||||
it 'user can pause and resume the project runner' do
|
||||
|
|
@ -75,7 +77,7 @@ RSpec.describe 'Maintainer manages project runners', feature_category: :fleet_vi
|
|||
check 'protected'
|
||||
click_button 'Save changes'
|
||||
|
||||
expect(page).to have_content 'Protected Yes'
|
||||
expect(page).to have_content 'Protected'
|
||||
end
|
||||
|
||||
context 'when a runner has a tag', :js do
|
||||
|
|
@ -95,7 +97,7 @@ RSpec.describe 'Maintainer manages project runners', feature_category: :fleet_vi
|
|||
uncheck 'run-untagged'
|
||||
click_button 'Save changes'
|
||||
|
||||
expect(page).to have_content 'Can run untagged jobs No'
|
||||
expect(page).not_to have_content 'Runs untagged jobs'
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,145 @@
|
|||
import Vue from 'vue';
|
||||
import VueRouter from 'vue-router';
|
||||
import VueApollo from 'vue-apollo';
|
||||
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
|
||||
import createMockApollo from 'helpers/mock_apollo_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { createAlert } from '~/alert';
|
||||
|
||||
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
|
||||
import RunnerHeader from '~/ci/runner/components/runner_header.vue';
|
||||
import RunnerDetails from '~/ci/runner/components/runner_details.vue';
|
||||
import RunnerDetailsTabs from '~/ci/runner/components/runner_details_tabs.vue';
|
||||
import RunnersJobs from '~/ci/runner/components/runner_jobs.vue';
|
||||
|
||||
import runnerQuery from '~/ci/runner/graphql/show/runner.query.graphql';
|
||||
import ProjectRunnerShowApp from '~/ci/runner/project_runner_show/project_runner_show_app.vue';
|
||||
import { captureException } from '~/ci/runner/sentry_utils';
|
||||
|
||||
import { runnerData } from '../mock_data';
|
||||
|
||||
jest.mock('~/alert');
|
||||
jest.mock('~/ci/runner/sentry_utils');
|
||||
|
||||
const mockRunner = runnerData.data.runner;
|
||||
const mockRunnerGraphqlId = mockRunner.id;
|
||||
const mockRunnerId = `${getIdFromGraphQLId(mockRunnerGraphqlId)}`;
|
||||
const mockRunnerSha = mockRunner.shortSha;
|
||||
|
||||
Vue.use(VueApollo);
|
||||
Vue.use(VueRouter);
|
||||
|
||||
describe('AdminRunnerShowApp', () => {
|
||||
let wrapper;
|
||||
let mockRunnerQuery;
|
||||
|
||||
const findRunnerHeader = () => wrapper.findComponent(RunnerHeader);
|
||||
const findRunnerDetails = () => wrapper.findComponent(RunnerDetails);
|
||||
const findRunnerDetailsTabs = () => wrapper.findComponent(RunnerDetailsTabs);
|
||||
const findRunnersJobs = () => wrapper.findComponent(RunnersJobs);
|
||||
|
||||
const mockRunnerQueryResult = (runner = {}) => {
|
||||
mockRunnerQuery = jest.fn().mockResolvedValue({
|
||||
data: {
|
||||
runner: { ...mockRunner, ...runner },
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
|
||||
wrapper = mountFn(ProjectRunnerShowApp, {
|
||||
apolloProvider: createMockApollo([[runnerQuery, mockRunnerQuery]]),
|
||||
propsData: {
|
||||
runnerId: mockRunnerId,
|
||||
...props,
|
||||
},
|
||||
...options,
|
||||
});
|
||||
|
||||
return waitForPromises();
|
||||
};
|
||||
|
||||
afterEach(() => {
|
||||
mockRunnerQuery.mockReset();
|
||||
});
|
||||
|
||||
describe('When showing runner details', () => {
|
||||
beforeEach(async () => {
|
||||
mockRunnerQueryResult();
|
||||
|
||||
await createComponent({ mountFn: mountExtended });
|
||||
});
|
||||
|
||||
it('expect GraphQL ID to be requested', () => {
|
||||
expect(mockRunnerQuery).toHaveBeenCalledWith({ id: mockRunnerGraphqlId });
|
||||
});
|
||||
|
||||
it('displays the runner header', () => {
|
||||
expect(findRunnerHeader().text()).toContain(`#${mockRunnerId} (${mockRunnerSha})`);
|
||||
});
|
||||
|
||||
it('shows runner details', () => {
|
||||
expect(findRunnerDetailsTabs().props('runner')).toEqual(mockRunner);
|
||||
});
|
||||
|
||||
it('shows basic runner details', async () => {
|
||||
await createComponent({
|
||||
mountFn: mountExtended,
|
||||
stubs: {
|
||||
HelpPopover: {
|
||||
template: '<div/>',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const expected = `Description My Runner
|
||||
Last contact Never contacted
|
||||
Configuration Runs untagged jobs
|
||||
Maximum job timeout None
|
||||
Token expiry Never expires
|
||||
Tags None`.replace(/\s+/g, ' ');
|
||||
|
||||
expect(wrapper.text().replace(/\s+/g, ' ')).toContain(expected);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When loading', () => {
|
||||
it('does not show runner details', () => {
|
||||
mockRunnerQueryResult();
|
||||
|
||||
createComponent();
|
||||
|
||||
expect(findRunnerDetails().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('does not show runner jobs', () => {
|
||||
mockRunnerQueryResult();
|
||||
|
||||
createComponent();
|
||||
|
||||
expect(findRunnersJobs().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When there is an error', () => {
|
||||
beforeEach(async () => {
|
||||
mockRunnerQuery = jest.fn().mockRejectedValueOnce(new Error('Error!'));
|
||||
await createComponent();
|
||||
});
|
||||
|
||||
it('does not show runner details', () => {
|
||||
expect(findRunnerDetails().exists()).toBe(false);
|
||||
});
|
||||
|
||||
it('error is reported to sentry', () => {
|
||||
expect(captureException).toHaveBeenCalledWith({
|
||||
error: new Error('Error!'),
|
||||
component: 'ProjectRunnerShowApp',
|
||||
});
|
||||
});
|
||||
|
||||
it('error is shown to the user', () => {
|
||||
expect(createAlert).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -89,3 +89,48 @@ export const crmContactsMock = [
|
|||
set: undefined, // On purpose
|
||||
},
|
||||
];
|
||||
|
||||
export const mockIssues = [
|
||||
{
|
||||
title: 'Issue 1',
|
||||
iid: 1,
|
||||
reference: 'group/project#1',
|
||||
workItemType: { iconName: 'issues' },
|
||||
},
|
||||
{
|
||||
title: 'Issue 2',
|
||||
iid: 2,
|
||||
reference: 'group/project#2',
|
||||
workItemType: { iconName: 'issues' },
|
||||
},
|
||||
];
|
||||
|
||||
export const mockAssignees = [
|
||||
{
|
||||
__typename: 'UserCore',
|
||||
id: 'gid://gitlab/User/1',
|
||||
avatarUrl: '',
|
||||
name: 'Administrator',
|
||||
username: 'root',
|
||||
webUrl: 'http://127.0.0.1:3000/root',
|
||||
webPath: '/root',
|
||||
},
|
||||
{
|
||||
__typename: 'UserCore',
|
||||
id: 'gid://gitlab/User/9',
|
||||
avatarUrl: '',
|
||||
name: 'Carla Weissnat',
|
||||
username: 'milford',
|
||||
webUrl: 'http://127.0.0.1:3000/milford',
|
||||
webPath: '/milford',
|
||||
},
|
||||
{
|
||||
__typename: 'UserCore',
|
||||
id: 'gid://gitlab/User/16',
|
||||
avatarUrl: '',
|
||||
name: 'Carol Hagenes',
|
||||
username: 'nancee_simonis',
|
||||
webUrl: 'http://127.0.0.1:3000/nancee_simonis',
|
||||
webPath: '/nancee_simonis',
|
||||
},
|
||||
];
|
||||
|
|
|
|||
|
|
@ -18,16 +18,19 @@ import waitForPromises from 'helpers/wait_for_promises';
|
|||
import AjaxCache from '~/lib/utils/ajax_cache';
|
||||
import axios from '~/lib/utils/axios_utils';
|
||||
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
|
||||
import { linkedItems } from '~/graphql_shared/issuable_client';
|
||||
import { linkedItems, currentAssignees } from '~/graphql_shared/issuable_client';
|
||||
import {
|
||||
eventlistenersMockDefaultMap,
|
||||
crmContactsMock,
|
||||
mockIssues,
|
||||
mockAssignees,
|
||||
} from 'ee_else_ce_jest/gfm_auto_complete/mock_data';
|
||||
|
||||
const mockSpriteIcons = '/icons.svg';
|
||||
|
||||
jest.mock('~/graphql_shared/issuable_client', () => ({
|
||||
linkedItems: jest.fn(),
|
||||
currentAssignees: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('escape', () => {
|
||||
|
|
@ -1131,104 +1134,6 @@ describe('GfmAutoComplete', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('unlink', () => {
|
||||
let autocomplete;
|
||||
let $textarea;
|
||||
const mockWorkItemFullPath = 'gitlab-test';
|
||||
const mockWorkItemIid = '1';
|
||||
const originalGon = window.gon;
|
||||
const dataSources = {
|
||||
issues: `${TEST_HOST}/autocomplete_sources/issues`,
|
||||
};
|
||||
const mockIssues = [
|
||||
{
|
||||
title: 'Issue 1',
|
||||
iid: 1,
|
||||
reference: 'group/project#1',
|
||||
workItemType: { iconName: 'issues' },
|
||||
},
|
||||
{
|
||||
title: 'Issue 2',
|
||||
iid: 2,
|
||||
reference: 'group/project#2',
|
||||
workItemType: { iconName: 'issues' },
|
||||
},
|
||||
];
|
||||
|
||||
const getDropdownItems = () => getAutocompleteDropdownItems('at-view-issues');
|
||||
|
||||
beforeEach(() => {
|
||||
window.gon = {
|
||||
current_user_use_work_items_view: true,
|
||||
};
|
||||
setHTMLFixture(`
|
||||
<section>
|
||||
<div id="linkeditems"
|
||||
data-work-item-full-path="${mockWorkItemFullPath}"
|
||||
data-work-item-iid="${mockWorkItemIid}"></div>
|
||||
<textarea></textarea>
|
||||
</section>
|
||||
`);
|
||||
$textarea = $('textarea');
|
||||
linkedItems.mockImplementation(() => ({
|
||||
[`${mockWorkItemFullPath}:${mockWorkItemIid}`]: [],
|
||||
}));
|
||||
|
||||
autocomplete = new GfmAutoComplete(dataSources);
|
||||
autocomplete.setup($textarea, { issues: true });
|
||||
autocomplete.cachedData['#'] = {
|
||||
// This looks odd but that's how GFMAutoComplete
|
||||
// caches issues data internally.
|
||||
'': [...mockIssues],
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
autocomplete.destroy();
|
||||
resetHTMLFixture();
|
||||
window.gon = originalGon;
|
||||
});
|
||||
|
||||
describe('without any linked issues present', () => {
|
||||
it('using "#" shows all the issues', () => {
|
||||
triggerDropdown($textarea, '#');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(mockIssues.length);
|
||||
expect(getDropdownItems()).toEqual(mockIssues.map((i) => `${i.reference} ${i.title}`));
|
||||
});
|
||||
|
||||
it('using "/unlink #" shows no issues', () => {
|
||||
triggerDropdown($textarea, '/unlink #');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(0);
|
||||
expect(linkedItems).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('with linked issue present', () => {
|
||||
beforeEach(() => {
|
||||
linkedItems.mockImplementation(() => ({
|
||||
[`${mockWorkItemFullPath}:${mockWorkItemIid}`]: [mockIssues[1]],
|
||||
}));
|
||||
});
|
||||
|
||||
it('using "#" shows all the issues', () => {
|
||||
triggerDropdown($textarea, '#');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(mockIssues.length);
|
||||
expect(getDropdownItems()).toEqual(mockIssues.map((i) => `${i.reference} ${i.title}`));
|
||||
});
|
||||
|
||||
it('using "/unlink #" shows only linked issues', () => {
|
||||
triggerDropdown($textarea, '/unlink #');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(1);
|
||||
expect(getDropdownItems()).toEqual([mockIssues[1]].map((i) => `${i.reference} ${i.title}`));
|
||||
expect(linkedItems).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('autocomplete show eventlisteners', () => {
|
||||
let $textarea;
|
||||
|
||||
|
|
@ -1316,4 +1221,157 @@ describe('GfmAutoComplete', () => {
|
|||
expect(autocomplete.dataSources).toEqual(newDataSources);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Work Items', () => {
|
||||
let autocomplete;
|
||||
let $textarea;
|
||||
const mockWorkItemId = 'gid://gitlab/WorkItem/1';
|
||||
const mockWorkItemFullPath = 'gitlab-test';
|
||||
const mockWorkItemIid = '1';
|
||||
const originalGon = window.gon;
|
||||
const dataSources = {
|
||||
issues: `${TEST_HOST}/autocomplete_sources/issues`,
|
||||
members: `${TEST_HOST}/autocomplete_sources/members`,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
window.gon = {
|
||||
current_user_use_work_items_view: true,
|
||||
};
|
||||
document.body.dataset.page = 'projects:issues:show';
|
||||
setHTMLFixture(`
|
||||
<section>
|
||||
<div class="js-gfm-wrapper"
|
||||
data-work-item-full-path="${mockWorkItemFullPath}"
|
||||
data-work-item-id="${mockWorkItemId}"
|
||||
data-work-item-iid="${mockWorkItemIid}">
|
||||
<textarea></textarea>
|
||||
</div>
|
||||
</section>
|
||||
`);
|
||||
$textarea = $('textarea');
|
||||
autocomplete = new GfmAutoComplete(dataSources);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
autocomplete.destroy();
|
||||
resetHTMLFixture();
|
||||
window.gon = originalGon;
|
||||
});
|
||||
|
||||
describe('unlink', () => {
|
||||
const getDropdownItems = () => getAutocompleteDropdownItems('at-view-issues');
|
||||
const issueMatcher = (issue) => `${issue.reference} ${issue.title}`;
|
||||
|
||||
beforeEach(() => {
|
||||
linkedItems.mockImplementation(() => ({
|
||||
[`${mockWorkItemFullPath}:${mockWorkItemIid}`]: [],
|
||||
}));
|
||||
|
||||
autocomplete.setup($textarea, { issues: true });
|
||||
autocomplete.cachedData['#'] = {
|
||||
// This looks odd but that's how GFMAutoComplete
|
||||
// caches issues data internally.
|
||||
'': [...mockIssues],
|
||||
};
|
||||
});
|
||||
|
||||
describe('without any linked issues present', () => {
|
||||
it('using "#" shows all the issues', () => {
|
||||
triggerDropdown($textarea, '#');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(mockIssues.length);
|
||||
expect(getDropdownItems()).toEqual(mockIssues.map(issueMatcher));
|
||||
});
|
||||
|
||||
it('using "/unlink #" shows no issues', () => {
|
||||
triggerDropdown($textarea, '/unlink #');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(0);
|
||||
expect(linkedItems).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('with linked issue present', () => {
|
||||
beforeEach(() => {
|
||||
linkedItems.mockImplementation(() => ({
|
||||
[`${mockWorkItemFullPath}:${mockWorkItemIid}`]: [mockIssues[1]],
|
||||
}));
|
||||
});
|
||||
|
||||
it('using "#" shows all the issues', () => {
|
||||
triggerDropdown($textarea, '#');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(mockIssues.length);
|
||||
expect(getDropdownItems()).toEqual(mockIssues.map(issueMatcher));
|
||||
});
|
||||
|
||||
it('using "/unlink #" shows only linked issues', () => {
|
||||
triggerDropdown($textarea, '/unlink #');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(1);
|
||||
expect(getDropdownItems()).toEqual([mockIssues[1]].map(issueMatcher));
|
||||
expect(linkedItems).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('unassign', () => {
|
||||
const getDropdownItems = () => getAutocompleteDropdownItems('at-view-users');
|
||||
const assigneeMatcher = (user) =>
|
||||
`${user.username.charAt(0).toUpperCase()} ${user.username} ${user.name}`;
|
||||
|
||||
beforeEach(() => {
|
||||
currentAssignees.mockImplementation(() => ({
|
||||
[`${mockWorkItemId}`]: [],
|
||||
}));
|
||||
|
||||
autocomplete.setup($textarea, { members: true });
|
||||
autocomplete.cachedData['@'] = {
|
||||
// This looks odd but that's how GFMAutoComplete
|
||||
// caches issues data internally.
|
||||
'': [...mockAssignees],
|
||||
};
|
||||
});
|
||||
|
||||
describe('without any assignees present', () => {
|
||||
it('using "@" shows all the members', () => {
|
||||
triggerDropdown($textarea, '@');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(mockAssignees.length);
|
||||
expect(getDropdownItems()).toEqual(mockAssignees.map(assigneeMatcher));
|
||||
});
|
||||
|
||||
it('using "/unassign @" shows no users', () => {
|
||||
triggerDropdown($textarea, '/unassign @');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(0);
|
||||
expect(currentAssignees).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('with assignees present', () => {
|
||||
beforeEach(() => {
|
||||
currentAssignees.mockImplementation(() => ({
|
||||
[`${mockWorkItemId}`]: [mockAssignees[1]],
|
||||
}));
|
||||
});
|
||||
|
||||
it('using "@" shows all the members', () => {
|
||||
triggerDropdown($textarea, '@');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(mockAssignees.length);
|
||||
expect(getDropdownItems()).toEqual(mockAssignees.map(assigneeMatcher));
|
||||
});
|
||||
|
||||
it('using "/unassign @" shows only current assignees', () => {
|
||||
triggerDropdown($textarea, '/unassign @');
|
||||
|
||||
expect(getDropdownItems()).toHaveLength(1);
|
||||
expect(getDropdownItems()).toEqual([mockAssignees[1]].map(assigneeMatcher));
|
||||
expect(currentAssignees).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import {
|
|||
} from '~/ide/lib/gitlab_web_ide';
|
||||
import Tracking from '~/tracking';
|
||||
import setWindowLocation from 'helpers/set_window_location_helper';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
import { renderWebIdeError } from '~/ide/render_web_ide_error';
|
||||
import { getMockCallbackUrl } from './helpers';
|
||||
|
||||
|
|
@ -87,9 +88,6 @@ describe('ide/init_gitlab_web_ide', () => {
|
|||
el.dataset.signInPath = TEST_SIGN_IN_PATH;
|
||||
el.dataset.signOutPath = TEST_SIGN_OUT_PATH;
|
||||
|
||||
getBaseConfig.mockReturnValue(TEST_BASE_CONFIG);
|
||||
isMultiDomainEnabled.mockReturnValue(false);
|
||||
|
||||
document.body.append(el);
|
||||
};
|
||||
const findRootElement = () => document.getElementById(ROOT_ELEMENT_ID);
|
||||
|
|
@ -100,6 +98,9 @@ describe('ide/init_gitlab_web_ide', () => {
|
|||
gon.features = { webIdeLanguageServer: true };
|
||||
process.env.GITLAB_WEB_IDE_PUBLIC_PATH = TEST_GITLAB_WEB_IDE_PUBLIC_PATH;
|
||||
|
||||
getBaseConfig.mockResolvedValue(TEST_BASE_CONFIG);
|
||||
isMultiDomainEnabled.mockReturnValue(false);
|
||||
|
||||
createRootElement();
|
||||
});
|
||||
|
||||
|
|
@ -256,7 +257,7 @@ describe('ide/init_gitlab_web_ide', () => {
|
|||
});
|
||||
|
||||
describe('when extensionMarketplaceSettings is in dataset', () => {
|
||||
function setMockExtensionMarketplaceSettingsDataset(
|
||||
async function setMockExtensionMarketplaceSettingsDataset(
|
||||
mockSettings = TEST_EXTENSION_MARKETPLACE_SETTINGS,
|
||||
) {
|
||||
findRootElement().dataset.extensionMarketplaceSettings = JSON.stringify(mockSettings);
|
||||
|
|
@ -266,10 +267,12 @@ describe('ide/init_gitlab_web_ide', () => {
|
|||
}
|
||||
|
||||
createSubject();
|
||||
|
||||
await waitForPromises();
|
||||
}
|
||||
|
||||
it('calls start with element and extensionsGallerySettings', () => {
|
||||
setMockExtensionMarketplaceSettingsDataset();
|
||||
it('calls start with element and extensionsGallerySettings', async () => {
|
||||
await setMockExtensionMarketplaceSettingsDataset();
|
||||
expect(start).toHaveBeenCalledTimes(1);
|
||||
expect(start).toHaveBeenCalledWith(
|
||||
findRootElement(),
|
||||
|
|
@ -285,8 +288,8 @@ describe('ide/init_gitlab_web_ide', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('calls start with element and crossOriginExtensionHost flag if extensionMarketplaceSettings is enabled', () => {
|
||||
setMockExtensionMarketplaceSettingsDataset();
|
||||
it('calls start with element and crossOriginExtensionHost flag if extensionMarketplaceSettings is enabled', async () => {
|
||||
await setMockExtensionMarketplaceSettingsDataset();
|
||||
expect(start).toHaveBeenCalledTimes(1);
|
||||
expect(start).toHaveBeenCalledWith(
|
||||
findRootElement(),
|
||||
|
|
@ -300,8 +303,8 @@ describe('ide/init_gitlab_web_ide', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('calls start with settingsContextHash', () => {
|
||||
setMockExtensionMarketplaceSettingsDataset();
|
||||
it('calls start with settingsContextHash', async () => {
|
||||
await setMockExtensionMarketplaceSettingsDataset();
|
||||
|
||||
expect(start).toHaveBeenCalledTimes(1);
|
||||
expect(start).toHaveBeenCalledWith(
|
||||
|
|
@ -314,13 +317,13 @@ describe('ide/init_gitlab_web_ide', () => {
|
|||
|
||||
it.each(['opt_in_unset', 'opt_in_disabled'])(
|
||||
'calls start with element and crossOriginExtensionHost flag if extensionMarketplaceSettings reason is $reason',
|
||||
(reason) => {
|
||||
async (reason) => {
|
||||
const mockExtensionMarketplaceDisabledSettings = {
|
||||
enabled: false,
|
||||
reason,
|
||||
};
|
||||
|
||||
setMockExtensionMarketplaceSettingsDataset(mockExtensionMarketplaceDisabledSettings);
|
||||
await setMockExtensionMarketplaceSettingsDataset(mockExtensionMarketplaceDisabledSettings);
|
||||
|
||||
expect(start).toHaveBeenCalledTimes(1);
|
||||
expect(start).toHaveBeenCalledWith(
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import {
|
|||
} from '~/ide/lib/gitlab_web_ide/get_base_config';
|
||||
import { isMultiDomainEnabled } from '~/ide/lib/gitlab_web_ide/is_multi_domain_enabled';
|
||||
import { TEST_HOST } from 'helpers/test_constants';
|
||||
import { stubCrypto } from 'helpers/crypto';
|
||||
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
|
||||
|
||||
const TEST_GITLAB_WEB_IDE_PUBLIC_PATH = 'test/gitlab-web-ide/public/path';
|
||||
|
|
@ -14,6 +15,7 @@ jest.mock('~/ide/lib/gitlab_web_ide/is_multi_domain_enabled');
|
|||
|
||||
describe('~/ide/lib/gitlab_web_ide/get_base_config', () => {
|
||||
useMockLocationHelper();
|
||||
stubCrypto();
|
||||
|
||||
beforeEach(() => {
|
||||
// why: add trailing "/" to test that it gets removed
|
||||
|
|
@ -21,8 +23,8 @@ describe('~/ide/lib/gitlab_web_ide/get_base_config', () => {
|
|||
window.gon.relative_url_root = '';
|
||||
});
|
||||
|
||||
it('with default, returns base properties for @gitlab/web-ide config', () => {
|
||||
const actual = getBaseConfig();
|
||||
it('with default, returns base properties for @gitlab/web-ide config', async () => {
|
||||
const actual = await getBaseConfig();
|
||||
|
||||
expect(actual).toEqual({
|
||||
workbenchBaseUrl: `${TEST_HOST}/${TEST_GITLAB_WEB_IDE_PUBLIC_PATH}`,
|
||||
|
|
@ -33,10 +35,10 @@ describe('~/ide/lib/gitlab_web_ide/get_base_config', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('with relative_url_root, returns baseUrl with relative url root', () => {
|
||||
it('with relative_url_root, returns baseUrl with relative url root', async () => {
|
||||
window.gon.relative_url_root = TEST_RELATIVE_URL_ROOT;
|
||||
|
||||
const actual = getBaseConfig();
|
||||
const actual = await getBaseConfig();
|
||||
|
||||
expect(actual).toEqual({
|
||||
workbenchBaseUrl: `${TEST_HOST}${TEST_RELATIVE_URL_ROOT}/${TEST_GITLAB_WEB_IDE_PUBLIC_PATH}`,
|
||||
|
|
@ -49,18 +51,22 @@ describe('~/ide/lib/gitlab_web_ide/get_base_config', () => {
|
|||
|
||||
describe('generateWorkbenchSubdomain', () => {
|
||||
it.each`
|
||||
origin | currentUsername | result
|
||||
${'https://example.com'} | ${'foobar'} | ${'aHR0cHM6Ly9leGFtcGxlLmNvbS1mb29iYXI'}
|
||||
${'https://ide.example.com'} | ${'barfoo'} | ${'aHR0cHM6Ly9pZGUuZXhhbXBsZS5jb20tYmFyZm9v'}
|
||||
${'https://ide.example.com'} | ${'bar.foo'} | ${'aHR0cHM6Ly9pZGUuZXhhbXBsZS5jb20tYmFyLmZvbw'}
|
||||
${'https://ide.example.com'} | ${'bar+foo'} | ${'aHR0cHM6Ly9pZGUuZXhhbXBsZS5jb20tYmFyK2Zvbw'}
|
||||
origin | currentUsername | result
|
||||
${'https://example.com'} | ${'foobar'} | ${'ee2af4a14057872bd8c7463645f503'}
|
||||
${'https://ide.example.com'} | ${'barfoo'} | ${'ae3f10e196eac8ef4045e3ec9ba4a5'}
|
||||
${'https://ide.example.com'} | ${'bar.foo'} | ${'5bfda1a3ce2b366a1491aba48eba08'}
|
||||
${'https://ide.example.com'} | ${'bar+foo'} | ${'b6a09e91b3b97cc3b4f70cf6dfa1dd'}
|
||||
${'https://ide.example.com'} | ${'bar+foo+bar+foo+bar+foo '} | ${'f16f0302f14b7026753d426915bef7'}
|
||||
`(
|
||||
'returns $result when origin is $origin and currentUsername is $currentUsername',
|
||||
({ origin, currentUsername, result }) => {
|
||||
async ({ origin, currentUsername, result }) => {
|
||||
window.location.origin = origin;
|
||||
window.gon.current_username = currentUsername;
|
||||
|
||||
expect(generateWorkbenchSubdomain()).toBe(result);
|
||||
const subdomain = await generateWorkbenchSubdomain();
|
||||
|
||||
expect(subdomain).toBe(result);
|
||||
expect(subdomain).toHaveLength(30);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
|
@ -71,20 +77,20 @@ describe('~/ide/lib/gitlab_web_ide/get_base_config', () => {
|
|||
isMultiDomainEnabled.mockReturnValue(true);
|
||||
});
|
||||
|
||||
it('returns workbenchBaseUrl with external domain and base64 encoded subdomain', () => {
|
||||
expect(getBaseConfig().workbenchBaseUrl).toBe(
|
||||
`https://workbench-${generateWorkbenchSubdomain()}.cdn.web-ide.gitlab-static.net/gitlab-web-ide-vscode-workbench-${packageJSON.version}`,
|
||||
it('returns workbenchBaseUrl with external domain and base64 encoded subdomain', async () => {
|
||||
expect((await getBaseConfig()).workbenchBaseUrl).toBe(
|
||||
`https://workbench-${await generateWorkbenchSubdomain()}.cdn.web-ide.gitlab-static.net/gitlab-web-ide-vscode-workbench-${packageJSON.version}`,
|
||||
);
|
||||
});
|
||||
|
||||
it('returns extensionsHostBaseUrl with external domain and placeholder uuid subdomain', () => {
|
||||
expect(getBaseConfig().extensionsHostBaseUrl).toBe(
|
||||
it('returns extensionsHostBaseUrl with external domain and placeholder uuid subdomain', async () => {
|
||||
expect((await getBaseConfig()).extensionsHostBaseUrl).toBe(
|
||||
`https://{{uuid}}.cdn.web-ide.gitlab-static.net/gitlab-web-ide-vscode-workbench-${packageJSON.version}/vscode`,
|
||||
);
|
||||
});
|
||||
|
||||
it('returns shared base properties', () => {
|
||||
expect(getBaseConfig()).toStrictEqual(
|
||||
it('returns shared base properties', async () => {
|
||||
expect(await getBaseConfig()).toStrictEqual(
|
||||
expect.objectContaining({
|
||||
embedderOriginUrl: TEST_HOST,
|
||||
gitlabUrl: TEST_HOST,
|
||||
|
|
|
|||
|
|
@@ -2,9 +2,6 @@ import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
 import projectImportGitlab from '~/projects/project_import_gitlab_project';

 describe('Import Gitlab project', () => {
-  const pathName = 'my-project';
-  const projectName = 'My Project';
-
   const setTestFixtures = (url) => {
     window.history.pushState({}, null, url);

@@ -16,45 +13,63 @@ describe('Import Gitlab project', () => {
     projectImportGitlab();
   };

-  beforeEach(() => {
-    setTestFixtures(`?name=${projectName}&path=${pathName}`);
-  });
-
-  afterEach(() => {
-    window.history.pushState({}, null, '');
-    resetHTMLFixture();
-  });
-
-  describe('project name', () => {
-    it('should fill in the project name derived from the previously filled project name', () => {
-      expect(document.querySelector('.js-project-name').value).toEqual(projectName);
-    });
-
-    describe('empty path name', () => {
-      it('derives the path name from the previously filled project name', () => {
-        const alternateProjectName = 'My Alt Project';
-        const alternatePathName = 'my-alt-project';
-
-        setTestFixtures(`?name=${alternateProjectName}`);
-
-        expect(document.querySelector('.js-path-name').value).toEqual(alternatePathName);
-      });
-    });
-  });
-
-  describe('path name', () => {
-    it('should fill in the path name derived from the previously filled path name', () => {
-      expect(document.querySelector('.js-path-name').value).toEqual(pathName);
-    });
-
-    describe('empty project name', () => {
-      it('derives the project name from the previously filled path name', () => {
-        const alternateProjectName = 'My Alt Project';
-        const alternatePathName = 'my-alt-project';
-
-        setTestFixtures(`?path=${alternatePathName}`);
-
-        expect(document.querySelector('.js-project-name').value).toEqual(alternateProjectName);
-      });
-    });
-  });
+  describe('with preset data in window history', () => {
+    const pathName = 'my-project';
+    const projectName = 'My Project';
+
+    beforeEach(() => {
+      setTestFixtures(`?name=${projectName}&path=${pathName}`);
+    });
+
+    afterEach(() => {
+      window.history.pushState({}, null, '');
+      resetHTMLFixture();
+    });
+
+    describe('project name', () => {
+      it('should fill in the project name derived from the previously filled project name', () => {
+        expect(document.querySelector('.js-project-name').value).toEqual(projectName);
+      });
+
+      describe('empty path name', () => {
+        it('derives the path name from the previously filled project name', () => {
+          const alternateProjectName = 'My Alt Project';
+          const alternatePathName = 'my-alt-project';
+
+          setTestFixtures(`?name=${alternateProjectName}`);
+
+          expect(document.querySelector('.js-path-name').value).toEqual(alternatePathName);
+        });
+      });
+    });
+
+    describe('path name', () => {
+      it('should fill in the path name derived from the previously filled path name', () => {
+        expect(document.querySelector('.js-path-name').value).toEqual(pathName);
+      });
+
+      describe('empty project name', () => {
+        it('derives the project name from the previously filled path name', () => {
+          const alternateProjectName = 'My Alt Project';
+          const alternatePathName = 'my-alt-project';
+
+          setTestFixtures(`?path=${alternatePathName}`);
+
+          expect(document.querySelector('.js-project-name').value).toEqual(alternateProjectName);
+        });
+      });
+    });
+  });
+
+  describe('without preset data in window history', () => {
+    beforeEach(() => {
+      setTestFixtures('');
+    });
+
+    describe('empty path name with no previous history', () => {
+      it('has no initial value for path or name', () => {
+        expect(document.querySelector('.js-project-name').value).toBe('');
+        expect(document.querySelector('.js-path-name').value).toBe('');
+      });
+    });
+  });
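The 'empty path name' and 'empty project name' cases above rely on one field being derived from the other; that logic lives in ~/projects/project_import_gitlab_project and is not shown in this diff. A rough, hypothetical illustration of the path derivation (helper name and regex are assumptions):

// Hypothetical helper for illustration; not the implementation under test.
const deriveProjectPath = (projectName) =>
  projectName
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-') // collapse anything non-alphanumeric into hyphens
    .replace(/^-+|-+$/g, ''); // trim leading and trailing hyphens

// deriveProjectPath('My Alt Project') === 'my-alt-project'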
@@ -156,10 +156,16 @@ describe('Diffs list store', () => {
     itSetsStatuses(() => store.reloadDiffs('/stream'));
+    itAddsLoadingFilesWhileStreaming(() => store.reloadDiffs('/stream'));
+
+    it('sets loading state', () => {
+      store.reloadDiffs('/stream');
+      expect(findDiffsList().dataset.loading).toBe('true');
+    });

     it('clears existing state', async () => {
       store.reloadDiffs('/stream');
       await waitForPromises();
       expect(findDiffsList().innerHTML).toBe('');
       expect(findDiffsList().dataset.loading).toBe(undefined);
     });
   });

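The new expectations describe an observable contract rather than an implementation: the diffs list element carries data-loading="true" synchronously after the call, and once the reload settles the old markup and the flag are gone. A loose sketch of that contract only (not the real store action; fetch stands in for the actual streaming request):

// Loose sketch of the contract asserted above; not the real reloadDiffs action.
function reloadDiffsContractSketch(streamUrl, diffsListEl) {
  diffsListEl.dataset.loading = 'true'; // observable synchronously ('sets loading state')

  return fetch(streamUrl) // stand-in for the real diff streaming request
    .then(() => {
      diffsListEl.innerHTML = ''; // previously rendered diffs are gone ('clears existing state')
      delete diffsListEl.dataset.loading;
    });
}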
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_state

   describe '#get' do
     it 'returns the value when there is an existing value stored in the counter' do
-      Gitlab::Redis::BufferedCounter.with do |redis|
+      Gitlab::Redis::SharedState.with do |redis|
         redis.set(counter.key, 456)
       end

@@ -393,7 +393,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_state
     it 'removes the key from Redis' do
       counter.initiate_refresh!

-      Gitlab::Redis::BufferedCounter.with do |redis|
+      Gitlab::Redis::SharedState.with do |redis|
         expect(redis.exists?(counter.key)).to eq(false)
       end
     end
@@ -488,7 +488,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_state
     end

     it 'removes all tracking keys' do
-      Gitlab::Redis::BufferedCounter.with do |redis|
+      Gitlab::Redis::SharedState.with do |redis|
         expect { counter.cleanup_refresh }
           .to change { redis.scan_each(match: "#{counter.refresh_key}*").to_a.count }.from(4).to(0)
       end
@@ -533,7 +533,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_state
       let(:flushed_amount) { 10 }

       before do
-        Gitlab::Redis::BufferedCounter.with do |redis|
+        Gitlab::Redis::SharedState.with do |redis|
           redis.incrby(counter.flushed_key, flushed_amount)
         end
       end
@@ -546,7 +546,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_state
       it 'deletes the relative :flushed key' do
         counter.commit_increment!

-        Gitlab::Redis::BufferedCounter.with do |redis|
+        Gitlab::Redis::SharedState.with do |redis|
           key_exists = redis.exists?(counter.flushed_key)
           expect(key_exists).to be_falsey
         end
@@ -555,7 +555,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_state

     context 'when deleting :flushed key fails' do
       before do
-        Gitlab::Redis::BufferedCounter.with do |redis|
+        Gitlab::Redis::SharedState.with do |redis|
           redis.incrby(counter.flushed_key, 10)

           allow(redis).to receive(:del).and_raise('could not delete key')
@@ -614,7 +614,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_state

     with_them do
       before do
-        Gitlab::Redis::BufferedCounter.with do |redis|
+        Gitlab::Redis::SharedState.with do |redis|
           redis.set(increment_key, increment) if increment
           redis.set(flushed_key, flushed) if flushed
         end
@@ -635,19 +635,19 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_state
   end

   def redis_get_key(key)
-    Gitlab::Redis::BufferedCounter.with do |redis|
+    Gitlab::Redis::SharedState.with do |redis|
       redis.get(key)
     end
   end

   def redis_exists_key(key)
-    Gitlab::Redis::BufferedCounter.with do |redis|
+    Gitlab::Redis::SharedState.with do |redis|
       redis.exists?(key)
     end
   end

   def redis_key_ttl(key)
-    Gitlab::Redis::BufferedCounter.with do |redis|
+    Gitlab::Redis::SharedState.with do |redis|
       redis.ttl(key)
     end
   end
Some files were not shown because too many files have changed in this diff.