Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-11-15 03:11:37 +00:00
parent 38b948a54e
commit aa9f295453
105 changed files with 1832 additions and 1028 deletions

View File

@ -620,6 +620,16 @@ upload-knapsack-report:
stage: report
when: always
export-test-metrics:
extends:
- .qa-install
- .ruby-image
- .rules:report:process-results
stage: report
when: always
script:
- bundle exec rake "ci:export_test_metrics[$CI_PROJECT_DIR/gitlab-qa-run-*/**/test-metrics-*.json]"
relate-test-failures:
extends:
- .qa-install

View File

@ -8,7 +8,6 @@
variables:
USE_BUNDLE_INSTALL: "false"
SETUP_DB: "false"
QA_EXPORT_TEST_METRICS: "false"
before_script:
- !reference [.default-before_script, before_script]
- cd qa && bundle install

View File

@ -178,7 +178,7 @@ notify-slack:
extends:
- .notify-slack-qa
- .qa-cache
- .rules:notify-slack
- .rules:main-run
stage: post-qa
variables:
RUN_WITH_BUNDLE: "true"
@ -188,6 +188,16 @@ notify-slack:
STATUS_SYM: ☠️
STATUS: failed
TYPE: "(review-app) "
when: on_failure
script:
- bundle exec gitlab-qa-report --prepare-stage-reports "$CI_PROJECT_DIR/qa/tmp/rspec-*.xml" # generate summary
- !reference [.notify-slack-qa, script]
export-test-metrics:
extends:
- .bundle-base
- .rules:main-run
stage: post-qa
when: always
script:
- bundle exec rake "ci:export_test_metrics[tmp/test-metrics-*.json]"

View File

@ -98,7 +98,6 @@
rules:
- when: always
.rules:notify-slack:
.rules:main-run:
rules:
- <<: *default-branch
when: on_failure
- *default-branch

View File

@ -1137,6 +1137,8 @@
SKIP_REPORT_IN_ISSUES: "false"
PROCESS_TEST_RESULTS: "true"
KNAPSACK_GENERATE_REPORT: "true"
QA_SAVE_TEST_METRICS: "true"
QA_EXPORT_TEST_METRICS: "false" # on main runs, metrics are exported to separate bucket via rake task for better consistency
- <<: *if-force-ci
when: manual
allow_failure: true
@ -1899,6 +1901,8 @@
allow_failure: true
variables:
KNAPSACK_GENERATE_REPORT: "true"
QA_SAVE_TEST_METRICS: "true"
QA_EXPORT_TEST_METRICS: "false" # on main runs, metrics are exported to separate bucket via rake task for better consistency
.review:rules:review-build-cng:
rules:

View File

@ -477,7 +477,7 @@ gem 'html2text'
gem 'stackprof', '~> 0.2.21', require: false
gem 'rbtrace', '~> 0.4', require: false
gem 'memory_profiler', '~> 0.9', require: false
gem 'memory_profiler', '~> 1.0', require: false
gem 'activerecord-explain-analyze', '~> 0.1', require: false
# OAuth

View File

@ -50,7 +50,7 @@
{"name":"benchmark","version":"0.2.0","platform":"ruby","checksum":"5f7087b794613abdd3ac9c13f4351f65b164bcb15ced2ad29508e365f9b28c77"},
{"name":"benchmark-ips","version":"2.3.0","platform":"ruby","checksum":"12443aa327d3129aa965244f79d7d5cb0f692f0f92ba7db76fba61526a40062e"},
{"name":"benchmark-malloc","version":"0.2.0","platform":"ruby","checksum":"37c68f0435261634026f584d79956a35325a3027e3e6b4cc8d7575aa10537e6b"},
{"name":"benchmark-memory","version":"0.1.2","platform":"ruby","checksum":"aa7bfe6776174d0ddefe6fb39945d88fff6d76eac165690188391d9acd441c87"},
{"name":"benchmark-memory","version":"0.2.0","platform":"ruby","checksum":"ca1e436433b09535ee8f64f80600a5edb407cff1f6ac70e089ca238118e6ab5c"},
{"name":"benchmark-perf","version":"0.6.0","platform":"ruby","checksum":"fe2b01959f3de0f9dd34820d54ef881eb4f3589fccb7d17b63068ac92d7f9621"},
{"name":"benchmark-trend","version":"0.4.0","platform":"ruby","checksum":"de5a02a9f443babefbbd97784759820decee8554a0c273d859c02a0990845d81"},
{"name":"better_errors","version":"2.9.1","platform":"ruby","checksum":"39efc116ab04d6c4200052c5782936e4bd99906978d098992bce6bf81d054284"},
@ -103,7 +103,7 @@
{"name":"declarative_policy","version":"1.1.0","platform":"ruby","checksum":"9af4cf299ade03f2bbf63908f2ce6a117d132fc714c39a128596667fb13331cb"},
{"name":"default_value_for","version":"3.4.0","platform":"ruby","checksum":"35d2dc51675a6bedfa875778628d44b823e0d7336da9432519477174ebb0f40f"},
{"name":"deprecation_toolkit","version":"1.5.1","platform":"ruby","checksum":"a8a1ab1a19ae40ea12560b65010e099f3459ebde390b76621ef0c21c516a04ba"},
{"name":"derailed_benchmarks","version":"2.1.1","platform":"ruby","checksum":"25b0ba79dc52c715ea6cce35fd8afbbf84511ef1c7f2bbe1d8b7a30addc6aab3"},
{"name":"derailed_benchmarks","version":"2.1.2","platform":"ruby","checksum":"eaadc6206ceeb5538ff8f5e04a0023d54ebdd95d04f33e8960fb95a5f189a14f"},
{"name":"device_detector","version":"1.0.0","platform":"ruby","checksum":"b800fb3150b00c23e87b6768011808ac1771fffaae74c3238ebaf2b782947a7d"},
{"name":"devise","version":"4.8.1","platform":"ruby","checksum":"fdd48bbe79a89e7c1152236a70479842ede48bea4fa7f4f2d8da1f872559803e"},
{"name":"devise-two-factor","version":"4.0.2","platform":"ruby","checksum":"6548d2696ed090d27046f888f4fa7380f151e0f823902d46fd9b91e7d0cac511"},
@ -331,7 +331,7 @@
{"name":"marcel","version":"1.0.2","platform":"ruby","checksum":"a013b677ef46cbcb49fd5c59b3d35803d2ee04dd75d8bfdc43533fc5a31f7e4e"},
{"name":"marginalia","version":"1.11.1","platform":"ruby","checksum":"cb63212ab63e42746e27595e912cb20408a1a28bcd0edde55d15b7c45fa289cf"},
{"name":"memoist","version":"0.16.2","platform":"ruby","checksum":"a52c53a3f25b5875151670b2f3fd44388633486dc0f09f9a7150ead1e3bf3c45"},
{"name":"memory_profiler","version":"0.9.14","platform":"ruby","checksum":"de558cf6525d8d56d2c0ea465b1664517fbe45560f892dc7a898d3b8c2863b12"},
{"name":"memory_profiler","version":"1.0.1","platform":"ruby","checksum":"38cdb42f22d9100df2eba0365c199724b58b05c38e765cd764a07392916901b1"},
{"name":"method_source","version":"1.0.0","platform":"ruby","checksum":"d779455a2b5666a079ce58577bfad8534f571af7cec8107f4dce328f0981dede"},
{"name":"mime-types","version":"3.4.1","platform":"ruby","checksum":"6bcf8b0e656b6ae9977bdc1351ef211d0383252d2f759a59ef4bcf254542fc46"},
{"name":"mime-types-data","version":"3.2022.0105","platform":"ruby","checksum":"d8c401ba9ea8b648b7145b90081789ec714e91fd625d82c5040079c5ea696f00"},

View File

@ -219,8 +219,8 @@ GEM
benchmark (0.2.0)
benchmark-ips (2.3.0)
benchmark-malloc (0.2.0)
benchmark-memory (0.1.2)
memory_profiler (~> 0.9)
benchmark-memory (0.2.0)
memory_profiler (~> 1)
benchmark-perf (0.6.0)
benchmark-trend (0.4.0)
better_errors (2.9.1)
@ -330,7 +330,7 @@ GEM
activerecord (>= 3.2.0, < 7.0)
deprecation_toolkit (1.5.1)
activesupport (>= 4.2)
derailed_benchmarks (2.1.1)
derailed_benchmarks (2.1.2)
benchmark-ips (~> 2)
dead_end
get_process_mem (~> 0)
@ -892,7 +892,7 @@ GEM
actionpack (>= 5.2)
activerecord (>= 5.2)
memoist (0.16.2)
memory_profiler (0.9.14)
memory_profiler (1.0.1)
method_source (1.0.0)
mime-types (3.4.1)
mime-types-data (~> 3.2015)
@ -1727,7 +1727,7 @@ DEPENDENCIES
mail (= 2.7.1)
mail-smtp_pool (~> 0.1.0)!
marginalia (~> 1.11.1)
memory_profiler (~> 0.9)
memory_profiler (~> 1.0)
microsoft_graph_mailer (~> 0.1.0)!
mini_magick (~> 4.10.1)
minitest (~> 5.11.0)

View File

@ -1,11 +1,38 @@
import Vue from 'vue';
import $ from 'jquery';
import { escape } from 'lodash';
import GroupSelect from '~/vue_shared/components/group_select/group_select.vue';
import { groupsPath } from '~/vue_shared/components/group_select/utils';
import { __ } from '~/locale';
import Api from './api';
import { loadCSSFile } from './lib/utils/css_utils';
import { select2AxiosTransport } from './lib/utils/select2_utils';
const initVueSelect = () => {
[...document.querySelectorAll('.ajax-groups-select')].forEach((el) => {
const { parentId: parentGroupID, groupsFilter, inputId } = el.dataset;
return new Vue({
el,
components: {
GroupSelect,
},
render(createElement) {
return createElement(GroupSelect, {
props: {
inputName: el.name,
initialSelection: el.value || null,
parentGroupID,
groupsFilter,
inputId,
clearable: el.classList.contains('allowClear'),
},
});
},
});
});
};
const groupsSelect = () => {
loadCSSFile(gon.select2_css_path)
.then(() => {
@ -84,8 +111,12 @@ const groupsSelect = () => {
export default () => {
if ($('.ajax-groups-select').length) {
import(/* webpackChunkName: 'select2' */ 'select2/select2')
.then(groupsSelect)
.catch(() => {});
if (gon.features?.vueGroupSelect) {
initVueSelect();
} else {
import(/* webpackChunkName: 'select2' */ 'select2/select2')
.then(groupsSelect)
.catch(() => {});
}
}
};

View File

@ -1,23 +1,12 @@
/* eslint-disable no-new */
import ShortcutsNavigation from '~/behaviors/shortcuts/shortcuts_navigation';
import { ACTIVE_TAB_SHARED, ACTIVE_TAB_ARCHIVED } from '~/groups/constants';
import initInviteMembersBanner from '~/groups/init_invite_members_banner';
import initInviteMembersModal from '~/invite_members/init_invite_members_modal';
import { getPagePath, getDashPath } from '~/lib/utils/common_utils';
import initNotificationsDropdown from '~/notifications';
import ProjectsList from '~/projects_list';
import GroupTabs from './group_tabs';
export default function initGroupDetails(actionName = 'show') {
const loadableActions = [ACTIVE_TAB_SHARED, ACTIVE_TAB_ARCHIVED];
const dashPath = getDashPath();
let action = loadableActions.includes(dashPath) ? dashPath : getPagePath(1);
if (actionName && action === actionName) {
action = 'show'; // 'show' resets GroupTabs to default action through base class
}
new GroupTabs({ parentEl: '.groups-listing', action });
export default function initGroupDetails() {
new ShortcutsNavigation();
initNotificationsDropdown();

View File

@ -1,136 +0,0 @@
import $ from 'jquery';
import createGroupTree from '~/groups';
import {
ACTIVE_TAB_SUBGROUPS_AND_PROJECTS,
ACTIVE_TAB_SHARED,
ACTIVE_TAB_ARCHIVED,
CONTENT_LIST_CLASS,
GROUPS_LIST_HOLDER_CLASS,
GROUPS_FILTER_FORM_CLASS,
} from '~/groups/constants';
import GroupFilterableList from '~/groups/groups_filterable_list';
import { removeParams } from '~/lib/utils/url_utility';
import UserTabs from '~/pages/users/user_tabs';
export default class GroupTabs extends UserTabs {
constructor({ defaultAction = 'subgroups_and_projects', action, parentEl }) {
super({ defaultAction, action, parentEl });
}
bindEvents() {
this.$parentEl
.off('shown.bs.tab', '.nav-links a[data-toggle="tab"]')
.on('shown.bs.tab', '.nav-links a[data-toggle="tab"]', (event) => this.tabShown(event));
}
tabShown(event) {
const $target = $(event.target);
const action = $target.data('action') || $target.data('targetSection');
const source = $target.attr('href') || $target.data('targetPath');
document.querySelector(GROUPS_FILTER_FORM_CLASS).action = source;
this.setTab(action);
return this.setCurrentAction(source);
}
setTab(action) {
const loadableActions = [
ACTIVE_TAB_SUBGROUPS_AND_PROJECTS,
ACTIVE_TAB_SHARED,
ACTIVE_TAB_ARCHIVED,
];
this.enableSearchBar(action);
this.action = action;
if (this.loaded[action]) {
return;
}
if (loadableActions.includes(action)) {
this.cleanFilterState();
this.loadTab(action);
}
}
loadTab(action) {
const elId = `js-groups-${action}-tree`;
const endpoint = this.getEndpoint(action);
this.toggleLoading(true);
createGroupTree(elId, endpoint, action);
this.loaded[action] = true;
this.toggleLoading(false);
}
getEndpoint(action) {
const { endpointsDefault, endpointsShared } = this.$parentEl.data();
let endpoint;
switch (action) {
case ACTIVE_TAB_ARCHIVED:
endpoint = `${endpointsDefault}?archived=only`;
break;
case ACTIVE_TAB_SHARED:
endpoint = endpointsShared;
break;
default:
// ACTIVE_TAB_SUBGROUPS_AND_PROJECTS
endpoint = endpointsDefault;
break;
}
return endpoint;
}
enableSearchBar(action) {
const containerEl = document.getElementById(action);
const form = document.querySelector(GROUPS_FILTER_FORM_CLASS);
const filter = form.querySelector('.js-groups-list-filter');
const holder = containerEl.querySelector(GROUPS_LIST_HOLDER_CLASS);
const dataEl = containerEl.querySelector(CONTENT_LIST_CLASS);
const endpoint = this.getEndpoint(action);
if (!dataEl) {
return;
}
const { dataset } = dataEl;
const opts = {
form,
filter,
holder,
filterEndpoint: endpoint || dataset.endpoint,
pagePath: null,
dropdownSel: '.js-group-filter-dropdown-wrap',
filterInputField: 'filter',
action,
};
if (!this.loaded[action]) {
const filterableList = new GroupFilterableList(opts);
filterableList.initSearch();
}
}
cleanFilterState() {
const values = Object.values(this.loaded);
const loadedTabs = values.filter((e) => e === true);
if (!loadedTabs.length) {
return;
}
const newState = removeParams(['page'], window.location.search);
window.history.replaceState(
{
url: newState,
},
document.title,
newState,
);
}
}

View File

@ -0,0 +1,6 @@
import { __ } from '~/locale';
export const TOGGLE_TEXT = __('Search for a group');
export const FETCH_GROUPS_ERROR = __('Unable to fetch groups. Reload the page to try again.');
export const FETCH_GROUP_ERROR = __('Unable to fetch group. Reload the page to try again.');
export const QUERY_TOO_SHORT_MESSAGE = __('Enter at least three characters to search.');

View File

@ -0,0 +1,195 @@
<script>
import { debounce } from 'lodash';
import { GlListbox } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import Api from '~/api';
import { __ } from '~/locale';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { createAlert } from '~/flash';
import { groupsPath } from './utils';
import {
TOGGLE_TEXT,
FETCH_GROUPS_ERROR,
FETCH_GROUP_ERROR,
QUERY_TOO_SHORT_MESSAGE,
} from './constants';
const MINIMUM_QUERY_LENGTH = 3;
export default {
components: {
GlListbox,
},
props: {
inputName: {
type: String,
required: true,
},
inputId: {
type: String,
required: true,
},
initialSelection: {
type: String,
required: false,
default: null,
},
clearable: {
type: Boolean,
required: false,
default: false,
},
parentGroupID: {
type: String,
required: false,
default: null,
},
groupsFilter: {
type: String,
required: false,
default: null,
},
},
data() {
return {
pristine: true,
searching: false,
searchString: '',
groups: [],
selectedValue: null,
selectedText: null,
};
},
computed: {
selected: {
set(value) {
this.selectedValue = value;
this.selectedText =
value === null ? null : this.groups.find((group) => group.value === value).full_name;
},
get() {
return this.selectedValue;
},
},
toggleText() {
return this.selectedText ?? this.$options.i18n.toggleText;
},
inputValue() {
return this.selectedValue ? this.selectedValue : '';
},
isSearchQueryTooShort() {
return this.searchString && this.searchString.length < MINIMUM_QUERY_LENGTH;
},
noResultsText() {
return this.isSearchQueryTooShort
? this.$options.i18n.searchQueryTooShort
: this.$options.i18n.noResultsText;
},
},
created() {
this.fetchInitialSelection();
},
methods: {
search: debounce(function debouncedSearch(searchString) {
this.searchString = searchString;
if (this.isSearchQueryTooShort) {
this.groups = [];
} else {
this.fetchGroups(searchString);
}
}, DEFAULT_DEBOUNCE_AND_THROTTLE_MS),
async fetchGroups(searchString = '') {
this.searching = true;
try {
const { data } = await axios.get(
Api.buildUrl(groupsPath(this.groupsFilter, this.parentGroupID)),
{
params: {
search: searchString,
},
},
);
const groups = data.length ? data : data.results || [];
this.groups = groups.map((group) => ({
...group,
value: String(group.id),
}));
this.searching = false;
} catch (error) {
createAlert({
message: FETCH_GROUPS_ERROR,
error,
parent: this.$el,
});
}
},
async fetchInitialSelection() {
if (!this.initialSelection) {
this.pristine = false;
return;
}
this.searching = true;
try {
const group = await Api.group(this.initialSelection);
this.selectedValue = this.initialSelection;
this.selectedText = group.full_name;
this.pristine = false;
this.searching = false;
} catch (error) {
createAlert({
message: FETCH_GROUP_ERROR,
error,
parent: this.$el,
});
}
},
onShown() {
if (!this.searchString && !this.groups.length) {
this.fetchGroups();
}
},
onReset() {
this.selected = null;
},
},
i18n: {
toggleText: TOGGLE_TEXT,
selectGroup: __('Select a group'),
reset: __('Reset'),
noResultsText: __('No results found.'),
searchQueryTooShort: QUERY_TOO_SHORT_MESSAGE,
},
};
</script>
<template>
<div>
<gl-listbox
ref="listbox"
v-model="selected"
:header-text="$options.i18n.selectGroup"
:reset-button-label="$options.i18n.reset"
:toggle-text="toggleText"
:loading="searching && pristine"
:searching="searching"
:items="groups"
:no-results-text="noResultsText"
searchable
@shown="onShown"
@search="search"
@reset="onReset"
>
<template #list-item="{ item }">
<div class="gl-font-weight-bold">
{{ item.full_name }}
</div>
<div class="gl-text-gray-300">{{ item.full_path }}</div>
</template>
</gl-listbox>
<div class="flash-container"></div>
<input :id="inputId" data-testid="input" type="hidden" :name="inputName" :value="inputValue" />
</div>
</template>

View File

@ -35,7 +35,7 @@ module SendFileUpload
end
def cdn_fronted_url(file, redirect_params)
if Feature.enabled?(:use_cdn_with_job_artifacts_ui_downloads) && file.respond_to?(:cdn_enabled_url)
if file.respond_to?(:cdn_enabled_url)
result = file.cdn_enabled_url(request.remote_ip, redirect_params[:query])
Gitlab::ApplicationContext.push(artifact_used_cdn: result.used_cdn)
result.url

View File

@ -8,7 +8,7 @@ module VerifiesWithEmail
include ActionView::Helpers::DateHelper
included do
prepend_before_action :verify_with_email, only: :create, unless: -> { two_factor_enabled? }
prepend_before_action :verify_with_email, only: :create, unless: -> { skip_verify_with_email? }
skip_before_action :required_signup_info, only: :successful_verification
end
@ -55,6 +55,10 @@ module VerifiesWithEmail
private
def skip_verify_with_email?
two_factor_enabled? || Gitlab::Qa.request?(request.user_agent)
end
def find_verification_user
return unless session[:verification_user_id]

View File

@ -97,7 +97,7 @@ module Resolvers
end
def ready?(**args)
if args[:or].present? && ::Feature.disabled?(:or_issuable_queries, resource_parent)
if args[:or].present? && or_issuable_queries_disabled?
raise ::Gitlab::Graphql::Errors::ArgumentError,
"'or' arguments are only allowed when the `or_issuable_queries` feature flag is enabled."
end
@ -115,6 +115,14 @@ module Resolvers
private
def or_issuable_queries_disabled?
if respond_to?(:resource_parent, true)
::Feature.disabled?(:or_issuable_queries, resource_parent)
else
::Feature.disabled?(:or_issuable_queries)
end
end
def prepare_finder_params(args)
params = super(args)
params[:not] = params[:not].to_h if params[:not]

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
module Resolvers
class IssuesResolver < Issues::BaseResolver
prepend ::Issues::LookAheadPreloads
include ::Issues::SortArguments
argument :state, Types::IssuableStateEnum,
required: false,
description: 'Current state of this issue.'
# see app/graphql/types/issue_connection.rb
type 'Types::IssueConnection', null: true
def resolve_with_lookahead(**args)
return unless Feature.enabled?(:root_level_issues_query)
issues = apply_lookahead(
IssuesFinder.new(current_user, prepare_finder_params(args)).execute
)
if non_stable_cursor_sort?(args[:sort])
# Certain complex sorts are not supported by the stable cursor pagination yet.
# In these cases, we use offset pagination, so we return the correct connection.
offset_pagination(issues)
else
issues
end
end
end
end

View File

@ -82,6 +82,13 @@ module Types
field :echo, resolver: Resolvers::EchoResolver
field :issues,
null: true,
alpha: { milestone: '15.6' },
resolver: Resolvers::IssuesResolver,
description: 'Issues visible by the current user.' \
' Returns null if the `root_level_issues_query` feature flag is disabled.'
field :issue, Types::IssueType,
null: true,
description: 'Find an issue.' do

View File

@ -148,8 +148,13 @@ module GroupsHelper
}
end
def subgroups_and_projects_list_app_data(group)
def group_overview_tabs_app_data(group)
{
subgroups_and_projects_endpoint: group_children_path(group, format: :json),
shared_projects_endpoint: group_shared_projects_path(group, format: :json),
archived_projects_endpoint: group_children_path(group, format: :json, archived: 'only'),
current_group_visibility: group.visibility,
initial_sort: project_list_sort_by,
show_schema_markup: 'true',
new_subgroup_path: new_group_path(parent_id: group.id, anchor: 'create-group-pane'),
new_project_path: new_project_path(namespace_id: group.id),
@ -162,16 +167,6 @@ module GroupsHelper
}
end
def group_overview_tabs_app_data(group)
{
subgroups_and_projects_endpoint: group_children_path(group, format: :json),
shared_projects_endpoint: group_shared_projects_path(group, format: :json),
archived_projects_endpoint: group_children_path(group, format: :json, archived: 'only'),
current_group_visibility: group.visibility,
initial_sort: project_list_sort_by
}.merge(subgroups_and_projects_list_app_data(group))
end
def enabled_git_access_protocol_options_for_group
case ::Gitlab::CurrentSettings.enabled_git_access_protocol
when nil, ""

View File

@ -2,27 +2,17 @@
module RecaptchaHelper
def recaptcha_enabled?
return false if gitlab_qa?
return false if Gitlab::Qa.request?(request.user_agent)
!!Gitlab::Recaptcha.enabled?
end
alias_method :show_recaptcha_sign_up?, :recaptcha_enabled?
def recaptcha_enabled_on_login?
return false if gitlab_qa?
return false if Gitlab::Qa.request?(request.user_agent)
Gitlab::Recaptcha.enabled_on_login?
end
private
def gitlab_qa?
return false unless Gitlab.com?
return false unless request.user_agent.present?
return false unless Gitlab::Environment.qa_user_agent.present?
ActiveSupport::SecurityUtils.secure_compare(request.user_agent, Gitlab::Environment.qa_user_agent)
end
end
RecaptchaHelper.prepend_mod

View File

@ -246,9 +246,7 @@ class MergeRequest < ApplicationRecord
end
after_transition any => [:unchecked, :cannot_be_merged_recheck, :checking, :cannot_be_merged_rechecking, :can_be_merged, :cannot_be_merged] do |merge_request, transition|
if Feature.enabled?(:trigger_mr_subscription_on_merge_status_change, merge_request.project)
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
end
GraphqlTriggers.merge_request_merge_status_updated(merge_request)
end
# rubocop: disable CodeReuse/ServiceClass

View File

@ -1,7 +0,0 @@
#js-groups-archived-tree
.empty-state.text-center.hidden
%p= _("There are no archived projects yet")
%ul.content-list{ data: { hide_projects: 'false', group_id: group.id, path: group_path(group) } }
.js-groups-list-holder
= gl_loading_icon(size: 'md', css_class: 'gl-mt-6')

View File

@ -1,7 +0,0 @@
#js-groups-shared-tree
.empty-state.text-center.hidden
%p= _("There are no projects shared with this group yet")
%ul.content-list{ data: { hide_projects: 'false', group_id: group.id, path: group_path(group) } }
.js-groups-list-holder{ data: { current_group_visibility: group.visibility } }
= gl_loading_icon

View File

@ -1,4 +0,0 @@
#js-groups-subgroups_and_projects-tree
%section{ data: { hide_projects: 'false', group_id: group.id, path: group_path(group) } }
.js-groups-list-holder{ data: subgroups_and_projects_list_app_data(group) }
= gl_loading_icon(size: 'md', css_class: 'gl-mt-6')

View File

@ -29,36 +29,4 @@
= render_if_exists 'groups/group_activity_analytics', group: @group
- if Feature.enabled?(:group_overview_tabs_vue, @group)
#js-group-overview-tabs{ data: group_overview_tabs_app_data(@group) }
- else
.groups-listing{ data: { endpoints: { default: group_children_path(@group, format: :json), shared: group_shared_projects_path(@group, format: :json) } } }
.top-area.group-nav-container.justify-content-between
.scrolling-tabs-container.inner-page-scroll-tabs
.fade-left= sprite_icon('chevron-lg-left', size: 12)
.fade-right= sprite_icon('chevron-lg-right', size: 12)
-# `item_active` is set to `false` as the active state is set by `app/assets/javascripts/pages/groups/shared/group_details.js`
-# TODO: Replace this approach in https://gitlab.com/gitlab-org/gitlab/-/issues/23466
= gl_tabs_nav({ class: 'nav-links scrolling-tabs gl-display-flex gl-flex-grow-1 gl-flex-nowrap gl-border-0' }) do
= gl_tab_link_to group_path, item_active: false, tab_class: 'js-subgroups_and_projects-tab', data: { target: 'div#subgroups_and_projects', action: 'subgroups_and_projects', toggle: 'tab' } do
= _("Subgroups and projects")
= gl_tab_link_to group_shared_path, item_active: false, tab_class: 'js-shared-tab', data: { target: 'div#shared', action: 'shared', toggle: 'tab' } do
= _("Shared projects")
= gl_tab_link_to group_archived_path, item_active: false, tab_class: 'js-archived-tab', data: { target: 'div#archived', action: 'archived', toggle: 'tab' } do
= _("Archived projects")
.nav-controls.d-block.d-md-flex
.group-search
= render "shared/groups/search_form"
= render "shared/groups/dropdown", options_hash: subgroups_sort_options_hash
.tab-content
#subgroups_and_projects.tab-pane
= render "subgroups_and_projects", group: @group
#shared.tab-pane
= render "shared_projects", group: @group
#archived.tab-pane
= render "archived_projects", group: @group
#js-group-overview-tabs{ data: group_overview_tabs_app_data(@group) }

View File

@ -1,8 +1,8 @@
---
name: use_cdn_with_job_artifacts_ui_downloads
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102839
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/381479
name: root_level_issues_query
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102348
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/382250
milestone: '15.6'
type: development
group: group::pipeline insights
group: group::project management
default_enabled: false

View File

@ -1,8 +0,0 @@
---
name: trigger_mr_subscription_on_merge_status_change
introduced_by_url: "https://gitlab.com/gitlab-org/gitlab/-/merge_requests/99213"
rollout_issue_url: "https://gitlab.com/gitlab-org/gitlab/-/issues/375704"
milestone: '15.5'
type: development
group: group::code review
default_enabled: false

View File

@ -1,8 +1,8 @@
---
name: group_overview_tabs_vue
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/95850
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/370872
milestone: '15.4'
name: vue_group_select
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/98597
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/381042
milestone: '15.6'
type: development
group: group::workspace
group: group::foundations
default_enabled: false

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: create
series: 0
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: create
series: 0
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: create
series: 1
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: create
series: 1
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: create
series: 2
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: create
series: 2
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: verify
series: 0
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: verify
series: 0
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: verify
series: 1
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: verify
series: 1
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: verify
series: 2
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: verify
series: 2
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: trial
series: 0
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: trial
series: 0
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: trial
series: 1
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: trial
series: 1
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: trial
series: 2
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: trial
series: 2
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: team
series: 0
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: team
series: 0
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: team
series: 1
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: team
series: 1
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: team
series: 2
distribution:
- ce
- ee

View File

@ -13,6 +13,10 @@ milestone: "13.12"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/56752
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: team
series: 2
distribution:
- ce
- ee

View File

@ -12,6 +12,10 @@ milestone: "14.2"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66854
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: team_short
series: 0
data_category: optional
distribution:
- ce

View File

@ -12,6 +12,10 @@ milestone: "14.2"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66854
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: team_short
series: 0
data_category: optional
distribution:
- ce

View File

@ -12,6 +12,10 @@ milestone: "14.2"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66943
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: trial_short
series: 0
data_category: optional
distribution:
- ce

View File

@ -12,6 +12,10 @@ milestone: "14.2"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66943
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: trial_short
series: 0
data_category: optional
distribution:
- ce

View File

@ -12,6 +12,10 @@ milestone: "14.2"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67147
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailCtaClickedMetric
options:
track: admin_verify
series: 0
data_category: optional
distribution:
- ce

View File

@ -12,6 +12,10 @@ milestone: "14.2"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67147
time_frame: all
data_source: database
instrumentation_class: InProductMarketingEmailSentMetric
options:
track: admin_verify
series: 0
data_category: optional
distribution:
- ce

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Adds a composite index used by the in-product marketing email usage-ping
# metrics, which count rows per (track, series) and filter on cta_clicked_at.
class AddIndexForInProductMarketingEmailMetrics < Gitlab::Database::Migration[2.0]
  # add_concurrent_index cannot run inside a transaction block.
  disable_ddl_transaction!

  INDEX_NAME = 'index_in_product_marketing_emails_on_track_series_id_clicked'

  def up
    add_concurrent_index :in_product_marketing_emails, %i[track series id cta_clicked_at], name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :in_product_marketing_emails, INDEX_NAME
  end
end

View File

@ -1,74 +1,8 @@
# frozen_string_literal: true
class MigrateSidekiqQueuedJobs < Gitlab::Database::Migration[2.0]
class SidekiqMigrateJobs
LOG_FREQUENCY_QUEUES = 10
attr_reader :logger, :mappings
# mappings is a hash of WorkerClassName => target_queue_name
def initialize(mappings, logger: nil)
@mappings = mappings
@logger = logger
end
# Migrates jobs from queues that are outside the mappings
# rubocop: disable Cop/SidekiqRedisCall
def migrate_queues
routing_rules_queues = mappings.values.uniq
logger&.info("List of queues based on routing rules: #{routing_rules_queues}")
Sidekiq.redis do |conn|
# Redis 6 supports conn.scan_each(match: "queue:*", type: 'list')
conn.scan_each(match: "queue:*") do |key|
# Redis 5 compatibility
next unless conn.type(key) == 'list'
queue_from = key.split(':', 2).last
next if routing_rules_queues.include?(queue_from)
logger&.info("Migrating #{queue_from} queue")
migrated = 0
while queue_length(queue_from) > 0
begin
if migrated >= 0 && migrated % LOG_FREQUENCY_QUEUES == 0
logger&.info("Migrating from #{queue_from}. Total: #{queue_length(queue_from)}. Migrated: #{migrated}.")
end
job = conn.rpop "queue:#{queue_from}"
job_hash = Sidekiq.load_json job
next unless mappings.has_key?(job_hash['class'])
destination_queue = mappings[job_hash['class']]
job_hash['queue'] = destination_queue
conn.lpush("queue:#{destination_queue}", Sidekiq.dump_json(job_hash))
migrated += 1
rescue JSON::ParserError
logger&.error("Unmarshal JSON payload from SidekiqMigrateJobs failed. Job: #{job}")
next
end
end
logger&.info("Finished migrating #{queue_from} queue")
end
end
end
private
def queue_length(queue_name)
Sidekiq.redis do |conn|
conn.llen("queue:#{queue_name}")
end
end
# rubocop: enable Cop/SidekiqRedisCall
end
def up
return if Gitlab.com?
mappings = Gitlab::SidekiqConfig.worker_queue_mappings
logger = ::Gitlab::BackgroundMigration::Logger.build
SidekiqMigrateJobs.new(mappings, logger: logger).migrate_queues
# no-op because of https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1991
end
def down

View File

@ -0,0 +1 @@
5ef00449d9c5b4a44b99410839d3ba4c4d6d8cb152460822b882c6ac60f771e3

View File

@ -29261,6 +29261,8 @@ CREATE INDEX index_imported_projects_on_import_type_creator_id_created_at ON pro
CREATE INDEX index_imported_projects_on_import_type_id ON projects USING btree (import_type, id) WHERE (import_type IS NOT NULL);
CREATE INDEX index_in_product_marketing_emails_on_track_series_id_clicked ON in_product_marketing_emails USING btree (track, series, id, cta_clicked_at);
CREATE UNIQUE INDEX index_in_product_marketing_emails_on_user_campaign ON in_product_marketing_emails USING btree (user_id, campaign);
CREATE INDEX index_in_product_marketing_emails_on_user_id ON in_product_marketing_emails USING btree (user_id);

View File

@ -28,7 +28,7 @@ full list of reference architectures, see
| Internal load balancing node<sup>3</sup> | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Redis/Sentinel - Cache<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Redis/Sentinel - Persistent<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 16 vCPU, 60 GB memory | `n1-standard-16` | `m5.4xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 16 vCPU, 60 GB memory | `n1-standard-16` | `m5.4xlarge` |
| Praefect<sup>5</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Sidekiq | 4 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
@ -2300,6 +2300,10 @@ compute deployments. With this, _stateless_ components can benefit from cloud na
workload management benefits while _stateful_ components are deployed in compute VMs
with Omnibus to benefit from increased permanence.
Refer to the Helm charts [Advanced configuration](https://docs.gitlab.com/charts/advanced/)
documentation for setup instructions including guidance on what GitLab secrets to sync
between Kubernetes and the backend components.
NOTE:
This is an **advanced** setup. Running services in Kubernetes is well known
to be complex. **This setup is only recommended** if you have strong working
@ -2342,7 +2346,7 @@ services where applicable):
| Internal load balancing node<sup>3</sup> | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Redis/Sentinel - Cache<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Redis/Sentinel - Persistent<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 16 vCPU, 60 GB memory | `n1-standard-16` | `m5.4xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 16 vCPU, 60 GB memory | `n1-standard-16` | `m5.4xlarge` |
| Praefect<sup>5</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Object storage<sup>4</sup> | - | - | - | - |
@ -2467,7 +2471,7 @@ ratio for each additional pod.
For further information on resource usage, see the [Sidekiq resources](https://docs.gitlab.com/charts/charts/gitlab/sidekiq/#resources).
### Supporting
#### Supporting
The Supporting Node Pool is designed to house all supporting deployments that don't need to be
on the Webservice and Sidekiq pools.
@ -2480,6 +2484,12 @@ to deploy these in this pool where possible and not in the Webservice or Sidekiq
specifically to accommodate several additional deployments. However, if your deployments don't fit into the
pool as given, you can increase the node pool accordingly.
## Secrets
When setting up a Cloud Native Hybrid environment, it's worth noting that several secrets should be synced from backend VMs from the `/etc/gitlab/gitlab-secrets.json` file into Kubernetes.
For this setup specifically, the [GitLab Rails](https://docs.gitlab.com/charts/installation/secrets.html#gitlab-rails-secret) and [GitLab Shell](https://docs.gitlab.com/charts/installation/secrets.html#gitlab-shell-secret) secrets should be synced.
<div align="right">
<a type="button" class="btn btn-default" href="#setup-components">
Back to setup components <i class="fa fa-angle-double-up" aria-hidden="true"></i>

View File

@ -28,7 +28,7 @@ full list of reference architectures, see
| Internal load balancing node<sup>3</sup> | 1 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` |
| Redis/Sentinel - Cache<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Redis/Sentinel - Persistent<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 32 vCPU, 120 GB memory | `n1-standard-32` | `m5.8xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 32 vCPU, 120 GB memory | `n1-standard-32` | `m5.8xlarge` |
| Praefect<sup>5</sup> | 3 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Sidekiq | 4 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
@ -2319,6 +2319,10 @@ compute deployments. With this, _stateless_ components can benefit from cloud na
workload management benefits while _stateful_ components are deployed in compute VMs
with Omnibus to benefit from increased permanence.
Refer to the Helm charts [Advanced configuration](https://docs.gitlab.com/charts/advanced/)
documentation for setup instructions including guidance on what GitLab secrets to sync
between Kubernetes and the backend components.
NOTE:
This is an **advanced** setup. Running services in Kubernetes is well known
to be complex. **This setup is only recommended** if you have strong working
@ -2361,7 +2365,7 @@ services where applicable):
| Internal load balancing node<sup>3</sup> | 1 | 4 vCPU, 3.6GB memory | `n1-highcpu-4` | `c5.xlarge` |
| Redis/Sentinel - Cache<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Redis/Sentinel - Persistent<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 32 vCPU, 120 GB memory | `n1-standard-32` | `m5.8xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 32 vCPU, 120 GB memory | `n1-standard-32` | `m5.8xlarge` |
| Praefect<sup>5</sup> | 3 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Object storage<sup>4</sup> | - | - | - | - |

View File

@ -1023,7 +1023,9 @@ compute deployments. With this, _stateless_ components can benefit from cloud na
workload management benefits while _stateful_ components are deployed in compute VMs
with Omnibus to benefit from increased permanence.
The 2,000 reference architecture is not a highly-available setup. To achieve HA, you can follow a modified [3K reference architecture](3k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative).
Refer to the Helm charts [Advanced configuration](https://docs.gitlab.com/charts/advanced/)
documentation for setup instructions including guidance on what GitLab secrets to sync
between Kubernetes and the backend components.
NOTE:
This is an **advanced** setup. Running services in Kubernetes is well known
@ -1031,6 +1033,10 @@ to be complex. **This setup is only recommended** if you have strong working
knowledge and experience in Kubernetes. The rest of this
section assumes this.
NOTE:
The 2,000 reference architecture is not a highly-available setup. To achieve HA,
you can follow a modified [3K reference architecture](3k_users.md#cloud-native-hybrid-reference-architecture-with-helm-charts-alternative).
NOTE:
**Gitaly Cluster is not supported to be run in Kubernetes**.
Refer to [epic 6127](https://gitlab.com/groups/gitlab-org/-/epics/6127) for more details.

View File

@ -37,7 +37,7 @@ For a full list of reference architectures, see
| PostgreSQL<sup>1</sup> | 3 | 2 vCPU, 7.5 GB memory | `n1-standard-2` | `m5.large` |
| PgBouncer<sup>1</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Internal load balancing node<sup>3</sup> | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Praefect<sup>5</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Sidekiq | 4 | 2 vCPU, 7.5 GB memory | `n1-standard-2` | `m5.large` |
@ -2291,6 +2291,10 @@ compute deployments. With this, _stateless_ components can benefit from cloud na
workload management benefits while _stateful_ components are deployed in compute VMs
with Omnibus to benefit from increased permanence.
Refer to the Helm charts [Advanced configuration](https://docs.gitlab.com/charts/advanced/)
documentation for setup instructions including guidance on what GitLab secrets to sync
between Kubernetes and the backend components.
NOTE:
This is an **advanced** setup. Running services in Kubernetes is well known
to be complex. **This setup is only recommended** if you have strong working
@ -2332,7 +2336,7 @@ services where applicable):
| PostgreSQL<sup>1</sup> | 3 | 2 vCPU, 7.5 GB memory | `n1-standard-2` | `m5.large` |
| PgBouncer<sup>1</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Internal load balancing node<sup>3</sup> | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Praefect<sup>5</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Object storage<sup>4</sup> | - | - | - | - |

View File

@ -28,7 +28,7 @@ full list of reference architectures, see
| Internal load balancing node<sup>3</sup> | 1 | 8 vCPU, 7.2 GB memory | `n1-highcpu-8` | `c5.2xlarge` |
| Redis/Sentinel - Cache<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Redis/Sentinel - Persistent<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 64 vCPU, 240 GB memory | `n1-standard-64` | `m5.16xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 64 vCPU, 240 GB memory | `n1-standard-64` | `m5.16xlarge` |
| Praefect<sup>5</sup> | 3 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Sidekiq | 4 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
@ -2321,6 +2321,10 @@ compute deployments. With this, _stateless_ components can benefit from cloud na
workload management benefits while _stateful_ components are deployed in compute VMs
with Omnibus to benefit from increased permanence.
Refer to the Helm charts [Advanced configuration](https://docs.gitlab.com/charts/advanced/)
documentation for setup instructions including guidance on what GitLab secrets to sync
between Kubernetes and the backend components.
NOTE:
This is an **advanced** setup. Running services in Kubernetes is well known
to be complex. **This setup is only recommended** if you have strong working
@ -2363,7 +2367,7 @@ services where applicable):
| Internal load balancing node<sup>3</sup> | 1 | 8 vCPU, 7.2 GB memory | `n1-highcpu-8` | `c5.2xlarge` |
| Redis/Sentinel - Cache<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Redis/Sentinel - Persistent<sup>2</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 64 vCPU, 240 GB memory | `n1-standard-64` | `m5.16xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 64 vCPU, 240 GB memory | `n1-standard-64` | `m5.16xlarge` |
| Praefect<sup>5</sup> | 3 | 4 vCPU, 3.6 GB memory | `n1-highcpu-4` | `c5.xlarge` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Object storage<sup>4</sup> | - | - | - | - |

View File

@ -34,7 +34,7 @@ costly-to-operate environment by using the
| PostgreSQL<sup>1</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| PgBouncer<sup>1</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Internal load balancing node<sup>3</sup> | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 8 vCPU, 30 GB memory | `n1-standard-8` | `m5.2xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 8 vCPU, 30 GB memory | `n1-standard-8` | `m5.2xlarge` |
| Praefect<sup>5</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Sidekiq | 4 | 2 vCPU, 7.5 GB memory | `n1-standard-2` | `m5.large` |
@ -2265,6 +2265,10 @@ compute deployments. With this, _stateless_ components can benefit from cloud na
workload management benefits while _stateful_ components are deployed in compute VMs
with Omnibus to benefit from increased permanence.
Refer to the Helm charts [Advanced configuration](https://docs.gitlab.com/charts/advanced/)
documentation for setup instructions including guidance on what GitLab secrets to sync
between Kubernetes and the backend components.
NOTE:
This is an **advanced** setup. Running services in Kubernetes is well known
to be complex. **This setup is only recommended** if you have strong working
@ -2306,7 +2310,7 @@ services where applicable):
| PostgreSQL<sup>1</sup> | 3 | 4 vCPU, 15 GB memory | `n1-standard-4` | `m5.xlarge` |
| PgBouncer<sup>1</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Internal load balancing node<sup>3</sup> | 1 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Gitaly<sup>5</sup><sup>6</sup> | 3 | 8 vCPU, 30 GB memory | `n1-standard-8` | `m5.2xlarge` |
| Gitaly<sup>5 6</sup> | 3 | 8 vCPU, 30 GB memory | `n1-standard-8` | `m5.2xlarge` |
| Praefect<sup>5</sup> | 3 | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Praefect PostgreSQL<sup>1</sup> | 1+ | 2 vCPU, 1.8 GB memory | `n1-highcpu-2` | `c5.large` |
| Object storage<sup>4</sup> | - | - | - | - |

View File

@ -214,6 +214,57 @@ Returns [`Issue`](#issue).
| ---- | ---- | ----------- |
| <a id="queryissueid"></a>`id` | [`IssueID!`](#issueid) | Global ID of the issue. |
### `Query.issues`
Issues visible by the current user. Returns null if the `root_level_issues_query` feature flag is disabled.
WARNING:
**Introduced** in 15.6.
This feature is in Alpha. It can be changed or removed at any time.
Returns [`IssueConnection`](#issueconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryissuesassigneeid"></a>`assigneeId` | [`String`](#string) | ID of a user assigned to the issues. Wildcard values "NONE" and "ANY" are supported. |
| <a id="queryissuesassigneeusername"></a>`assigneeUsername` **{warning-solid}** | [`String`](#string) | **Deprecated** in 13.11. Use `assigneeUsernames`. |
| <a id="queryissuesassigneeusernames"></a>`assigneeUsernames` | [`[String!]`](#string) | Usernames of users assigned to the issue. |
| <a id="queryissuesauthorusername"></a>`authorUsername` | [`String`](#string) | Username of the author of the issue. |
| <a id="queryissuesclosedafter"></a>`closedAfter` | [`Time`](#time) | Issues closed after this date. |
| <a id="queryissuesclosedbefore"></a>`closedBefore` | [`Time`](#time) | Issues closed before this date. |
| <a id="queryissuesconfidential"></a>`confidential` | [`Boolean`](#boolean) | Filter for confidential issues. If "false", excludes confidential issues. If "true", returns only confidential issues. |
| <a id="queryissuescreatedafter"></a>`createdAfter` | [`Time`](#time) | Issues created after this date. |
| <a id="queryissuescreatedbefore"></a>`createdBefore` | [`Time`](#time) | Issues created before this date. |
| <a id="queryissuescrmcontactid"></a>`crmContactId` | [`String`](#string) | ID of a contact assigned to the issues. |
| <a id="queryissuescrmorganizationid"></a>`crmOrganizationId` | [`String`](#string) | ID of an organization assigned to the issues. |
| <a id="queryissuesepicid"></a>`epicId` | [`String`](#string) | ID of an epic associated with the issues, "none" and "any" values are supported. |
| <a id="queryissueshealthstatusfilter"></a>`healthStatusFilter` | [`HealthStatusFilter`](#healthstatusfilter) | Health status of the issue, "none" and "any" values are supported. |
| <a id="queryissuesiid"></a>`iid` | [`String`](#string) | IID of the issue. For example, "1". |
| <a id="queryissuesiids"></a>`iids` | [`[String!]`](#string) | List of IIDs of issues. For example, `["1", "2"]`. |
| <a id="queryissuesin"></a>`in` | [`[IssuableSearchableField!]`](#issuablesearchablefield) | Specify the fields to perform the search in. Defaults to `[TITLE, DESCRIPTION]`. Requires the `search` argument. |
| <a id="queryissuesincludesubepics"></a>`includeSubepics` | [`Boolean`](#boolean) | Whether to include subepics when filtering issues by epicId. |
| <a id="queryissuesiterationid"></a>`iterationId` | [`[ID]`](#id) | List of iteration Global IDs applied to the issue. |
| <a id="queryissuesiterationwildcardid"></a>`iterationWildcardId` | [`IterationWildcardId`](#iterationwildcardid) | Filter by iteration ID wildcard. |
| <a id="queryissueslabelname"></a>`labelName` | [`[String]`](#string) | Labels applied to this issue. |
| <a id="queryissuesmilestonetitle"></a>`milestoneTitle` | [`[String]`](#string) | Milestone applied to this issue. |
| <a id="queryissuesmilestonewildcardid"></a>`milestoneWildcardId` | [`MilestoneWildcardId`](#milestonewildcardid) | Filter issues by milestone ID wildcard. |
| <a id="queryissuesmyreactionemoji"></a>`myReactionEmoji` | [`String`](#string) | Filter by reaction emoji applied by the current user. Wildcard values "NONE" and "ANY" are supported. |
| <a id="queryissuesnot"></a>`not` | [`NegatedIssueFilterInput`](#negatedissuefilterinput) | Negated arguments. |
| <a id="queryissuesor"></a>`or` | [`UnionedIssueFilterInput`](#unionedissuefilterinput) | List of arguments with inclusive OR. |
| <a id="queryissuessearch"></a>`search` | [`String`](#string) | Search query for title or description. |
| <a id="queryissuessort"></a>`sort` | [`IssueSort`](#issuesort) | Sort issues by this criteria. |
| <a id="queryissuesstate"></a>`state` | [`IssuableState`](#issuablestate) | Current state of this issue. |
| <a id="queryissuestypes"></a>`types` | [`[IssueType!]`](#issuetype) | Filter issues by the given issue types. |
| <a id="queryissuesupdatedafter"></a>`updatedAfter` | [`Time`](#time) | Issues updated after this date. |
| <a id="queryissuesupdatedbefore"></a>`updatedBefore` | [`Time`](#time) | Issues updated before this date. |
| <a id="queryissuesweight"></a>`weight` | [`String`](#string) | Weight applied to the issue, "none" and "any" values are supported. |
### `Query.iteration`
Find an iteration.

View File

@ -78,45 +78,45 @@ paths:
metadata:
get:
tags:
- metadata
summary: 'Retrieve metadata information for this GitLab instance.'
operationId: 'getMetadata'
responses:
'401':
description: 'unauthorized operation'
'200':
description: 'successful operation'
content:
'application/json':
schema:
title: 'MetadataResponse'
type: 'object'
properties:
version:
type: 'string'
revision:
type: 'string'
kas:
type: 'object'
properties:
enabled:
type: 'boolean'
externalUrl:
type: 'string'
nullable: true
version:
type: 'string'
nullable: true
examples:
Example:
value:
version: '15.0-pre'
revision: 'c401a659d0c'
tags:
- metadata
summary: 'Retrieve metadata information for this GitLab instance.'
operationId: 'getMetadata'
responses:
'401':
description: 'unauthorized operation'
'200':
description: 'successful operation'
content:
'application/json':
schema:
title: 'MetadataResponse'
type: 'object'
properties:
version:
type: 'string'
revision:
type: 'string'
kas:
enabled: true
externalUrl: 'grpc://gitlab.example.com:8150'
version: '15.0.0'
type: 'object'
properties:
enabled:
type: 'boolean'
externalUrl:
type: 'string'
nullable: true
version:
type: 'string'
nullable: true
examples:
Example:
value:
version: '15.0-pre'
revision: 'c401a659d0c'
kas:
enabled: true
externalUrl: 'grpc://gitlab.example.com:8150'
version: '15.0.0'
version:
get:

View File

@ -373,12 +373,12 @@ https://gitlab.example.com/<namespace>/<project>/badges/<branch>/coverage.svg
```
To get the coverage report from a specific job, add
the `job=coverage_job_name` parameter to the URL. For example, the following
Markdown code embeds the test coverage report badge of the `coverage` job
in your `README.md`:
the `job=coverage_job_name` parameter to the URL. For example, you can use code
similar to the following to add the test coverage report badge of the `coverage` job
to a Markdown file:
```markdown
![coverage](https://gitlab.com/gitlab-org/gitlab/badges/main/coverage.svg?job=coverage)
![coverage](https://gitlab.example.com/<namespace>/<project>/badges/<branch>/coverage.svg?job=coverage)
```
#### Test coverage report badge colors and limits

View File

@ -196,7 +196,8 @@ Use these environment variables to configure metrics export:
| `QA_INFLUXDB_URL` | `true` | Should be set to `https://influxdb.quality.gitlab.net`. No default value. |
| `QA_INFLUXDB_TOKEN` | `true` | InfluxDB write token that can be found under `Influxdb auth tokens` document in `Gitlab-QA` `1Password` vault. No default value. |
| `QA_RUN_TYPE` | `false` | Arbitrary name for test execution, like `package-and-test`. Automatically inferred from the project name for live environment test executions. No default value. |
| `QA_EXPORT_TEST_METRICS` | `false` | Flag to enable or disable metrics export. Defaults to `true`. |
| `QA_EXPORT_TEST_METRICS` | `false` | Flag to enable or disable metrics export to InfluxDB. Defaults to `false`. |
| `QA_SAVE_TEST_METRICS` | `false` | Flag to enable or disable saving metrics as JSON file. Defaults to `false`. |
## Test reports

View File

@ -640,6 +640,7 @@ The following variables are used for configuring specific analyzers (used for a
| `GEMNASIUM_DB_REMOTE_URL` | `gemnasium` | `https://gitlab.com/gitlab-org/security-products/gemnasium-db.git` | Repository URL for fetching the Gemnasium database. |
| `GEMNASIUM_DB_REF_NAME` | `gemnasium` | `master` | Branch name for remote repository database. `GEMNASIUM_DB_REMOTE_URL` is required. |
| `DS_REMEDIATE` | `gemnasium` | `"true"`, `"false"` in FIPS mode | Enable automatic remediation of vulnerable dependencies. Not supported in FIPS mode. |
| `DS_REMEDIATE_TIMEOUT` | `gemnasium` | `5m` | Timeout for auto-remediation. |
| `GEMNASIUM_LIBRARY_SCAN_ENABLED` | `gemnasium` | `"true"` | Enable detecting vulnerabilities in vendored JavaScript libraries. For now, `gemnasium` leverages [`Retire.js`](https://github.com/RetireJS/retire.js) to do this job. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/350512) in GitLab 14.8. |
| `DS_JAVA_VERSION` | `gemnasium-maven` | `17` | Version of Java. Available versions: `8`, `11`, `13`, `14`, `15`, `16`, `17`. Available versions in FIPS-enabled image: `8`, `11`, `17`. |
| `MAVEN_CLI_OPTS` | `gemnasium-maven` | `"-DskipTests --batch-mode"` | List of command line arguments that are passed to `maven` by the analyzer. See an example for [using private repositories](../index.md#using-private-maven-repositories). |

View File

@ -5,9 +5,5 @@ module Gitlab
def self.hostname
@hostname ||= ENV['HOSTNAME'] || Socket.gethostname
end
def self.qa_user_agent
ENV['GITLAB_QA_USER_AGENT']
end
end
end

View File

@ -65,6 +65,7 @@ module Gitlab
push_frontend_feature_flag(:new_header_search)
push_frontend_feature_flag(:source_editor_toolbar)
push_frontend_feature_flag(:integration_slack_app_notifications)
push_frontend_feature_flag(:vue_group_select)
end
# Exposes the state of a feature flag to the frontend code.

17
lib/gitlab/qa.rb Normal file
View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
module Gitlab
  # Helpers for identifying HTTP requests issued by the GitLab QA framework.
  module Qa
    class << self
      # User agent string configured for QA runs via GITLAB_QA_USER_AGENT,
      # or nil when the variable is unset.
      def user_agent
        ENV['GITLAB_QA_USER_AGENT']
      end

      # Whether the given request user agent matches the configured QA user
      # agent. Only ever true on GitLab.com. Uses a constant-time comparison
      # so the expected value cannot be probed through timing differences.
      def request?(request_user_agent)
        return false unless Gitlab.com?
        return false unless request_user_agent.present? && user_agent.present?

        ActiveSupport::SecurityUtils.secure_compare(request_user_agent, user_agent)
      end
    end
  end
end

View File

@ -0,0 +1,54 @@
# frozen_string_literal: true
module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        # Usage-ping metric counting in-product marketing emails whose
        # call-to-action link was clicked, scoped to a single track/series
        # pair supplied through the metric definition's options.
        class InProductMarketingEmailCtaClickedMetric < DatabaseMetric
          operation :count

          relation { Users::InProductMarketingEmail }

          # @raise [ArgumentError] when the configured track or series is
          #   outside the allowed values for this metric.
          def initialize(metric_definition)
            super

            validate_track!
            validate_series!
          end

          private

          # Narrows the base relation to clicked emails of the configured
          # track and series.
          def relation
            super
              .where.not(cta_clicked_at: nil)
              .where(series: series, track: track)
          end

          def track
            options[:track]
          end

          def series
            options[:series]
          end

          def validate_track!
            return if track.in?(allowed_track)

            raise ArgumentError, "track '#{track}' must be one of: #{allowed_track.join(', ')}"
          end

          def validate_series!
            return if series.in?(allowed_series)

            raise ArgumentError, "series '#{series}' must be one of: #{allowed_series.join(', ')}"
          end

          def allowed_track
            Users::InProductMarketingEmail::ACTIVE_TRACKS.keys
          end

          # Valid series indexes run from 0 to (email count for track) - 1.
          def allowed_series
            @allowed_series ||= (0...Namespaces::InProductMarketingEmailsService.email_count_for_track(track)).to_a
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,54 @@
# frozen_string_literal: true
module Gitlab
  module Usage
    module Metrics
      module Instrumentations
        # Usage-ping metric counting in-product marketing emails sent for a
        # single track/series pair supplied through the metric definition's
        # options.
        class InProductMarketingEmailSentMetric < DatabaseMetric
          operation :count

          relation { Users::InProductMarketingEmail }

          # @raise [ArgumentError] when the configured track or series is
          #   outside the allowed values for this metric.
          def initialize(metric_definition)
            super

            validate_track!
            validate_series!
          end

          private

          # Narrows the base relation to emails of the configured track and
          # series.
          def relation
            super.where(series: series, track: track)
          end

          def track
            options[:track]
          end

          def series
            options[:series]
          end

          def validate_track!
            return if track.in?(allowed_track)

            raise ArgumentError, "track '#{track}' must be one of: #{allowed_track.join(', ')}"
          end

          def validate_series!
            return if series.in?(allowed_series)

            raise ArgumentError, "series '#{series}' must be one of: #{allowed_series.join(', ')}"
          end

          def allowed_track
            Users::InProductMarketingEmail::ACTIVE_TRACKS.keys
          end

          # Valid series indexes run from 0 to (email count for track) - 1.
          def allowed_series
            @allowed_series ||= (0...Namespaces::InProductMarketingEmailsService.email_count_for_track(track)).to_a
          end
        end
      end
    end
  end
end

View File

@ -158,8 +158,7 @@ module Gitlab
integrations_usage,
user_preferences_usage,
container_expiration_policies_usage,
service_desk_counts,
email_campaign_counts
service_desk_counts
).tap do |data|
data[:snippets] = add(data[:personal_snippets], data[:project_snippets])
end
@ -708,37 +707,6 @@ module Gitlab
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def email_campaign_counts
# rubocop:disable UsageData/LargeTable
sent_emails = count(Users::InProductMarketingEmail.group(:track, :series))
clicked_emails = count(Users::InProductMarketingEmail.where.not(cta_clicked_at: nil).group(:track, :series))
Users::InProductMarketingEmail::ACTIVE_TRACKS.keys.each_with_object({}) do |track, result|
series_amount = Namespaces::InProductMarketingEmailsService.email_count_for_track(track)
# rubocop: enable UsageData/LargeTable:
0.upto(series_amount - 1).map do |series|
sent_count = sent_in_product_marketing_email_count(sent_emails, track, series)
clicked_count = clicked_in_product_marketing_email_count(clicked_emails, track, series)
result["in_product_marketing_email_#{track}_#{series}_sent"] = sent_count
result["in_product_marketing_email_#{track}_#{series}_cta_clicked"] = clicked_count unless track == 'experience'
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
def sent_in_product_marketing_email_count(sent_emails, track, series)
# When there is an error with the query and it's not the Hash we expect, we return what we got from `count`.
sent_emails.is_a?(Hash) ? sent_emails.fetch([track, series], 0) : sent_emails
end
def clicked_in_product_marketing_email_count(clicked_emails, track, series)
# When there is an error with the query and it's not the Hash we expect, we return what we got from `count`.
clicked_emails.is_a?(Hash) ? clicked_emails.fetch([track, series], 0) : clicked_emails
end
def total_alert_issues
# Remove prometheus table queries once they are deprecated
# To be removed with https://gitlab.com/gitlab-org/gitlab/-/issues/217407.

View File

@ -2954,7 +2954,7 @@ msgstr ""
msgid "AdminSettings|Select a CI/CD template"
msgstr ""
msgid "AdminSettings|Select a group to use as the source for instance-level project templates."
msgid "AdminSettings|Select a group to use as a source of custom templates for new projects. %{link_start}Learn more%{link_end}."
msgstr ""
msgid "AdminSettings|Select to disable public access for Pages sites, which requires users to sign in for access to the Pages sites in your instance. %{link_start}Learn more.%{link_end}"
@ -3017,9 +3017,6 @@ msgstr ""
msgid "AdminSettings|The latest artifacts for all jobs in the most recent successful pipelines in each project are stored and do not expire."
msgstr ""
msgid "AdminSettings|The projects in this group can be selected as templates for new projects created on the instance. %{link_start}Learn more.%{link_end} "
msgstr ""
msgid "AdminSettings|The template for the required pipeline configuration can be one of the GitLab-provided templates, or a custom template added to an instance template repository. %{link_start}How do I create an instance template repository?%{link_end}"
msgstr ""
@ -15164,6 +15161,9 @@ msgstr ""
msgid "Enter at least three characters to search"
msgstr ""
msgid "Enter at least three characters to search."
msgstr ""
msgid "Enter in your Bitbucket Server URL and personal access token below"
msgstr ""
@ -19425,7 +19425,7 @@ msgstr ""
msgid "GroupSettings|Reporting"
msgstr ""
msgid "GroupSettings|Select a subgroup to use as the source for custom project templates for this group."
msgid "GroupSettings|Select a subgroup to use as a source of custom templates for new projects in this group. %{link_start}Learn more%{link_end}."
msgstr ""
msgid "GroupSettings|Select parent group"
@ -19446,9 +19446,6 @@ msgstr ""
msgid "GroupSettings|The Auto DevOps pipeline runs if no alternative CI configuration file is found."
msgstr ""
msgid "GroupSettings|The projects in this subgroup can be selected as templates for new projects created in the group. %{link_start}Learn more.%{link_end}"
msgstr ""
msgid "GroupSettings|There was a problem updating Auto DevOps pipeline: %{error_messages}."
msgstr ""
@ -27304,6 +27301,9 @@ msgstr ""
msgid "No results found"
msgstr ""
msgid "No results found."
msgstr ""
msgid "No runner executable"
msgstr ""
@ -37033,6 +37033,9 @@ msgstr ""
msgid "Select a file from the left sidebar to begin editing. Afterwards, you'll be able to commit your changes."
msgstr ""
msgid "Select a group"
msgstr ""
msgid "Select a label"
msgstr ""
@ -41007,9 +41010,6 @@ msgstr ""
msgid "There are no abuse reports!"
msgstr ""
msgid "There are no archived projects yet"
msgstr ""
msgid "There are no archived requirements"
msgstr ""
@ -41067,9 +41067,6 @@ msgstr ""
msgid "There are no packages yet"
msgstr ""
msgid "There are no projects shared with this group yet"
msgstr ""
msgid "There are no secure files yet."
msgstr ""
@ -43212,6 +43209,12 @@ msgstr ""
msgid "Unable to fetch branches list, please close the form and try again"
msgstr ""
msgid "Unable to fetch group. Reload the page to try again."
msgstr ""
msgid "Unable to fetch groups. Reload the page to try again."
msgstr ""
msgid "Unable to fetch upstream and downstream pipelines."
msgstr ""

View File

@ -443,7 +443,11 @@ module QA
end
def export_metrics?
running_in_ci? && enabled?(ENV['QA_EXPORT_TEST_METRICS'], default: true)
enabled?(ENV['QA_EXPORT_TEST_METRICS'], default: false)
end
def save_metrics_json?
enabled?(ENV['QA_SAVE_TEST_METRICS'], default: false)
end
def ee_activation_code

View File

@ -24,7 +24,7 @@ RSpec.configure do |config|
config.add_formatter QA::Support::Formatters::ContextFormatter
config.add_formatter QA::Support::Formatters::QuarantineFormatter
config.add_formatter QA::Support::Formatters::FeatureFlagFormatter
config.add_formatter QA::Support::Formatters::TestStatsFormatter if QA::Runtime::Env.export_metrics?
config.add_formatter QA::Support::Formatters::TestMetricsFormatter if QA::Runtime::Env.running_in_ci?
config.before(:suite) do |suite|
QA::Resource::ReusableCollection.register_resource_classes do |collection|

View File

@ -1,9 +1,11 @@
# frozen_string_literal: true
require "active_support/core_ext/string/conversions"
module QA
module Support
module Formatters
class TestStatsFormatter < RSpec::Core::Formatters::BaseFormatter
class TestMetricsFormatter < RSpec::Core::Formatters::BaseFormatter
include Support::InfluxdbTools
RSpec::Core::Formatters.register(self, :stop)
@ -13,29 +15,43 @@ module QA
# @param [RSpec::Core::Notifications::ExamplesNotification] notification
# @return [void]
def stop(notification)
push_test_stats(notification.examples)
push_fabrication_stats
return log(:warn, "Missing run_type, skipping metrics export!") unless run_type
parse_execution_data(notification.examples)
if Runtime::Env.export_metrics?
push_test_metrics
push_fabrication_metrics
end
save_test_metrics if Runtime::Env.save_metrics_json?
end
private
# Push test execution stats to influxdb
# Save execution data for the run
#
# @param [Array<RSpec::Core::Example>] examples
# @return [void]
def push_test_stats(examples)
data = examples.map { |example| test_stats(example) }.compact
write_api.write(data: data)
log(:debug, "Pushed #{data.length} test execution entries to influxdb")
rescue StandardError => e
log(:error, "Failed to push test execution stats to influxdb, error: #{e}")
# @return [Array<Hash>]
def execution_data(examples = nil)
@execution_metrics ||= examples.map { |example| test_stats(example) }.compact
end
alias_method :parse_execution_data, :execution_data
# Push resource fabrication stats to influxdb
# Push test execution metrics to influxdb
#
# @return [void]
def push_fabrication_stats
def push_test_metrics
write_api.write(data: execution_data)
log(:debug, "Pushed #{execution_data.length} test execution entries to influxdb")
rescue StandardError => e
log(:error, "Failed to push test execution metrics to influxdb, error: #{e}")
end
# Push resource fabrication metrics to influxdb
#
# @return [void]
def push_fabrication_metrics
data = Tools::TestResourceDataProcessor.resources.flat_map do |resource, values|
values.map { |v| fabrication_stats(resource: resource, **v) }
end
@ -44,7 +60,16 @@ module QA
write_api.write(data: data)
log(:debug, "Pushed #{data.length} resource fabrication entries to influxdb")
rescue StandardError => e
log(:error, "Failed to push fabrication stats to influxdb, error: #{e}")
log(:error, "Failed to push fabrication metrics to influxdb, error: #{e}")
end
# Save metrics in json file
#
# @return [void]
def save_test_metrics
File.write("tmp/test-metrics-#{env('CI_JOB_NAME_SLUG') || 'local'}.json", execution_data.to_json)
rescue StandardError => e
log(:error, "Failed to save test execution metrics, error: #{e}")
end
# Transform example to influxdb compatible metrics data
@ -57,6 +82,9 @@ module QA
api_fabrication = ((example.metadata[:api_fabrication] || 0) * 1000).round
ui_fabrication = ((example.metadata[:browser_ui_fabrication] || 0) * 1000).round
# do not export results for tests that are not compatible with environment
return if incompatible_env?(example)
{
name: 'test-stats',
time: time,
@ -101,6 +129,7 @@ module QA
# @param [Symbol] fabrication_method
# @param [Symbol] http_method
# @param [Integer] fabrication_time
# @param [String] timestamp
# @return [Hash]
def fabrication_stats(resource:, info:, fabrication_method:, http_method:, fabrication_time:, timestamp:, **)
{
@ -136,7 +165,7 @@ module QA
@time ||= begin
return Time.now unless env('CI_PIPELINE_CREATED_AT')
DateTime.strptime(env('CI_PIPELINE_CREATED_AT')).to_time
env('CI_PIPELINE_CREATED_AT').to_time
end
end
@ -172,6 +201,17 @@ module QA
retry_attempts(example.metadata) > 0 ? :flaky : :passed
end
# Check if test was skipped due to context condition
#
# @param [RSpec::Core::Example] example
# @return [Boolean]
def incompatible_env?(example)
return false unless example.execution_result.status == :pending
return false unless example.metadata[:skip]
!example.metadata[:skip].to_s.include?("quarantine") # rubocop:disable Rails/NegateInclude
end
# Retry attempts
#
# @param [Hash] metadata

View File

@ -7,7 +7,11 @@ module QA
# Common tools for use with influxdb metrics setup
#
module InfluxdbTools
# @return [String] bucket for storing all test run metrics
INFLUX_TEST_METRICS_BUCKET = "e2e-test-stats"
# @return [String] bucket for storing metrics from main runs
INFLUX_MAIN_TEST_METRICS_BUCKET = "e2e-test-stats-main"
# @return [Array] live environment names
LIVE_ENVS = %w[staging staging-canary staging-ref canary preprod production].freeze
private

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true

require "active_support/core_ext/string/conversions"

module QA
  module Tools
    module Ci
      # Exports test metrics that were saved as json files during test runs
      # to the main influxdb bucket.
      class TestMetrics
        include Helpers
        include Support::InfluxdbTools

        # Convenience entry point used by the rake task.
        #
        # @param metrics_file_glob [String] glob pattern matching metrics json files
        # @return [void]
        def self.export(metrics_file_glob)
          new(metrics_file_glob).export
        end

        # @param metrics_file_glob [String] glob pattern matching metrics json files
        def initialize(metrics_file_glob)
          @metrics_file_glob = metrics_file_glob
        end

        # Push all parsed metrics entries to the main influxdb bucket.
        #
        # @return [void]
        def export
          if metrics_files.empty?
            return logger.warn("No files matched pattern '#{metrics_file_glob}'")
          end

          logger.info("Exporting #{metrics_data.size} entries to influxdb")
          influx_client.create_write_api.write(data: metrics_data, bucket: INFLUX_MAIN_TEST_METRICS_BUCKET)
        end

        private

        attr_reader :metrics_file_glob

        # Metrics json files matching the configured glob pattern.
        #
        # @return [Array]
        def metrics_files
          @metrics_files ||= Dir.glob(metrics_file_glob)
        end

        # Parsed metrics entries with :time coerced back into Time objects
        # (json round-trip turns timestamps into strings).
        #
        # @return [Array<Hash>]
        def metrics_data
          @metrics_data ||= metrics_files.flat_map do |file|
            JSON.parse(File.read(file), symbolize_names: true)
          end.map do |entry|
            entry.merge(time: entry[:time].to_time)
          end
        end
      end
    end
  end
end

View File

@ -3,7 +3,7 @@
require 'rspec/core/sandbox'
require 'active_support/testing/time_helpers'
describe QA::Support::Formatters::TestStatsFormatter do
describe QA::Support::Formatters::TestMetricsFormatter do
include QA::Support::Helpers::StubEnv
include QA::Specs::Helpers::RSpec
include ActiveSupport::Testing::TimeHelpers
@ -58,7 +58,7 @@ describe QA::Support::Formatters::TestStatsFormatter do
testcase: testcase
},
fields: {
id: './spec/support/formatters/test_stats_formatter_spec.rb[1:1]',
id: './spec/support/formatters/test_metrics_formatter_spec.rb[1:1]',
run_time: 0,
api_fabrication: api_fabrication * 1000,
ui_fabrication: ui_fabrication * 1000,
@ -133,6 +133,7 @@ describe QA::Support::Formatters::TestStatsFormatter do
stub_env('CI_MERGE_REQUEST_IID', nil)
stub_env('TOP_UPSTREAM_MERGE_REQUEST_IID', nil)
stub_env('QA_RUN_TYPE', run_type)
stub_env('QA_EXPORT_TEST_METRICS', "true")
end
context 'with reliable spec' do
@ -204,6 +205,20 @@ describe QA::Support::Formatters::TestStatsFormatter do
end
end
context 'with skipped spec' do
it 'skips export' do
run_spec do
it(
'spec',
skip: 'not compatible',
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/1234'
) {}
end
expect(influx_write_api).to have_received(:write).with(data: [])
end
end
context 'with staging full run' do
let(:run_type) { 'staging-full' }
@ -301,5 +316,21 @@ describe QA::Support::Formatters::TestStatsFormatter do
expect(influx_write_api).to have_received(:write).with(data: [fabrication_data])
end
end
context 'with persisting metrics' do
before do
stub_env('QA_EXPORT_TEST_METRICS', "false")
stub_env('QA_SAVE_TEST_METRICS', "true")
stub_env('CI_JOB_NAME_SLUG', "test-job")
allow(File).to receive(:write)
end
it 'saves test metrics as json files' do
run_spec
expect(File).to have_received(:write).with("tmp/test-metrics-test-job.json", [data].to_json)
end
end
end
end

View File

@ -0,0 +1,54 @@
# frozen_string_literal: true

RSpec.describe QA::Tools::Ci::TestMetrics do
  include QA::Support::Helpers::StubEnv

  # Doubles standing in for the influxdb client, its write api and the shared logger.
  let(:influx_client) { instance_double("InfluxDB2::Client", create_write_api: influx_write_api) }
  let(:influx_write_api) { instance_double("InfluxDB2::WriteApi", write: nil) }
  let(:logger) { instance_double("Logger", info: true, warn: true) }

  let(:glob) { "metrics_glob/*.json" }
  let(:paths) { ["/metrics_glob/metrics.json"] }
  let(:timestamp) { "2022-11-11 07:54:11 +0000" }
  let(:metrics_json) { metrics_data.to_json }
  # Payload expected after the exporter parses the json file and restores :time
  # back into a Time object.
  let(:metrics_data) do
    [
      {
        time: timestamp.to_time,
        name: "name",
        tags: {},
        fields: {}
      }
    ]
  end

  before do
    # Stub out all external collaborators: influxdb client, logger and filesystem.
    allow(InfluxDB2::Client).to receive(:new) { influx_client }
    allow(Gitlab::QA::TestLogger).to receive(:logger) { logger }
    allow(Dir).to receive(:glob).with(glob) { paths }
    allow(File).to receive(:read).with(paths.first) { metrics_json }

    stub_env('QA_INFLUXDB_URL', "test")
    stub_env('QA_INFLUXDB_TOKEN', "test")
  end

  context "with metrics files present" do
    it "exports saved metrics to influxdb" do
      described_class.export(glob)

      # INFLUX_MAIN_TEST_METRICS_BUCKET resolves to "e2e-test-stats-main"
      expect(influx_write_api).to have_received(:write).with(data: metrics_data, bucket: "e2e-test-stats-main")
    end
  end

  context "without metrics files present" do
    let(:paths) { [] }

    it "exits without error" do
      described_class.export(glob)

      # No files matched: nothing is written, only a warning is logged.
      expect(influx_write_api).not_to have_received(:write)
      expect(logger).to have_received(:warn).with("No files matched pattern '#{glob}'")
    end
  end
end

View File

@ -1,8 +1,8 @@
# frozen_string_literal: true
require_relative "helpers/util"
namespace :ci do
require_relative "helpers/util"
include Task::Helpers::Util
desc "Detect changes and populate test variables for selective test execution and feature flag testing"
@ -58,4 +58,11 @@ namespace :ci do
task :download_test_results, [:trigger_name, :test_report_job_name, :report_path] do |_, args|
QA::Tools::Ci::TestResults.get(args[:trigger_name], args[:test_report_job_name], args[:report_path])
end
desc "Export test run metrics to influxdb"
task :export_test_metrics, [:glob] do |_, args|
raise("Metrics file glob pattern is required") unless args[:glob]
QA::Tools::Ci::TestMetrics.export(args[:glob])
end
end

View File

@ -25,6 +25,8 @@ variables:
GITLAB_QA_CACHE_KEY: "$qa_cache_key"
GITLAB_VERSION: "$(cat VERSION)"
COLORIZED_LOGS: "true"
QA_EXPORT_TEST_METRICS: "${QA_EXPORT_TEST_METRICS:-true}"
QA_SAVE_TEST_METRICS: "${QA_SAVE_TEST_METRICS:-false}"
QA_RUN_ALL_TESTS: "${QA_RUN_ALL_TESTS:-false}"
QA_FRAMEWORK_CHANGES: "${QA_FRAMEWORK_CHANGES:-false}"
QA_FEATURE_FLAGS: "${QA_FEATURE_FLAGS}"

View File

@ -289,27 +289,13 @@ RSpec.describe SendFileUpload do
allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { false }
end
context 'when use_cdn_with_job_artifacts_ui_downloads feature is enabled' do
it 'sends a file when CDN URL' do
expect(uploader).to receive(:use_cdn?).and_return(true)
expect(uploader).to receive(:cdn_provider).and_return(cdn_provider)
expect(controller).to receive(:request).and_return(request)
expect(controller).to receive(:redirect_to).with(signed_url)
it 'sends a file when CDN URL' do
expect(uploader).to receive(:use_cdn?).and_return(true)
expect(uploader).to receive(:cdn_provider).and_return(cdn_provider)
expect(controller).to receive(:request).and_return(request)
expect(controller).to receive(:redirect_to).with(signed_url)
subject
end
end
context 'when use_cdn_with_job_artifacts_ui_downloads is disabled' do
before do
stub_feature_flags(use_cdn_with_job_artifacts_ui_downloads: false)
end
it 'sends a file' do
expect(controller).to receive(:redirect_to).with(/#{uploader.path}/)
subject
end
subject
end
end
end

View File

@ -191,28 +191,13 @@ RSpec.describe Projects::ArtifactsController do
request.env['action_dispatch.remote_ip'] = '18.245.0.42'
end
context 'with use_cdn_with_job_artifacts_ui_downloads enabled' do
it 'redirects to a Google CDN request' do
expect(Gitlab::ApplicationContext).to receive(:push).with(artifact: an_instance_of(Ci::JobArtifact)).and_call_original
expect(Gitlab::ApplicationContext).to receive(:push).with(artifact_used_cdn: true).and_call_original
it 'redirects to a Google CDN request' do
expect(Gitlab::ApplicationContext).to receive(:push).with(artifact: an_instance_of(Ci::JobArtifact)).and_call_original
expect(Gitlab::ApplicationContext).to receive(:push).with(artifact_used_cdn: true).and_call_original
download_artifact(file_type: file_type)
download_artifact(file_type: file_type)
expect(response.redirect_url).to start_with("https://cdn.example.org/")
end
end
context 'with use_cdn_with_job_artifacts_ui_downloads disabled' do
before do
stub_feature_flags(use_cdn_with_job_artifacts_ui_downloads: false)
end
it 'does not redirect to the CDN' do
download_artifact(file_type: file_type)
expect(response.redirect_url).to be_present
expect(response.redirect_url).not_to start_with("https://cdn.example.org/")
end
expect(response.redirect_url).to start_with("https://cdn.example.org/")
end
end
end

View File

@ -0,0 +1,202 @@
import { nextTick } from 'vue';
import { GlListbox } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
import { createAlert } from '~/flash';
import GroupSelect from '~/vue_shared/components/group_select/group_select.vue';
import {
  TOGGLE_TEXT,
  FETCH_GROUPS_ERROR,
  FETCH_GROUP_ERROR,
  QUERY_TOO_SHORT_MESSAGE,
} from '~/vue_shared/components/group_select/constants';
import waitForPromises from 'helpers/wait_for_promises';

jest.mock('~/flash');

// Specs for the GroupSelect listbox component: fetching groups (lazily, on
// open), initial selection, selection state, and search behavior.
describe('GroupSelect', () => {
  let wrapper;
  let mock;

  // Mocks
  const groupMock = {
    full_name: 'selectedGroup',
    id: '1',
  };
  const groupEndpoint = `/api/undefined/groups/${groupMock.id}`;

  // Props
  const inputName = 'inputName';
  const inputId = 'inputId';

  // Finders
  const findListbox = () => wrapper.findComponent(GlListbox);
  const findInput = () => wrapper.findByTestId('input');

  // Helpers
  const createComponent = ({ props = {} } = {}) => {
    wrapper = shallowMountExtended(GroupSelect, {
      propsData: {
        inputName,
        inputId,
        ...props,
      },
    });
  };
  // The component fetches groups only once the listbox emits 'shown'.
  const openListbox = () => findListbox().vm.$emit('shown');
  const search = (searchString) => findListbox().vm.$emit('search', searchString);
  const createComponentWithGroups = () => {
    mock.onGet('/api/undefined/groups.json').reply(200, [groupMock]);
    createComponent();
    openListbox();
    return waitForPromises();
  };
  const selectGroup = () => {
    findListbox().vm.$emit('select', groupMock.id);
    return nextTick();
  };

  beforeEach(() => {
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.restore();
  });

  describe('on mount', () => {
    it('fetches groups when the listbox is opened', async () => {
      createComponent();
      await waitForPromises();

      // No request before the listbox is opened...
      expect(mock.history.get).toHaveLength(0);

      openListbox();
      await waitForPromises();
      // ...exactly one once it is.
      expect(mock.history.get).toHaveLength(1);
    });

    describe('with an initial selection', () => {
      it('if the selected group is not part of the fetched list, fetches it individually', async () => {
        mock.onGet(groupEndpoint).reply(200, groupMock);
        createComponent({ props: { initialSelection: groupMock.id } });
        await waitForPromises();

        expect(mock.history.get).toHaveLength(1);
        expect(findListbox().props('toggleText')).toBe(groupMock.full_name);
      });

      it('show an error if fetching the individual group fails', async () => {
        mock
          .onGet('/api/undefined/groups.json')
          .reply(200, [{ full_name: 'notTheSelectedGroup', id: '2' }]);
        mock.onGet(groupEndpoint).reply(500);
        createComponent({ props: { initialSelection: groupMock.id } });
        await waitForPromises();

        expect(createAlert).toHaveBeenCalledWith({
          message: FETCH_GROUP_ERROR,
          error: expect.any(Error),
          parent: wrapper.vm.$el,
        });
      });
    });
  });

  it('shows an error when fetching groups fails', async () => {
    mock.onGet('/api/undefined/groups.json').reply(500);
    createComponent();
    openListbox();
    await waitForPromises();

    expect(createAlert).toHaveBeenCalledWith({
      message: FETCH_GROUPS_ERROR,
      error: expect.any(Error),
      parent: wrapper.vm.$el,
    });
  });

  describe('selection', () => {
    it('uses the default toggle text while no group is selected', async () => {
      await createComponentWithGroups();

      expect(findListbox().props('toggleText')).toBe(TOGGLE_TEXT);
    });

    describe('once a group is selected', () => {
      it(`uses the selected group's name as the toggle text`, async () => {
        await createComponentWithGroups();
        await selectGroup();

        expect(findListbox().props('toggleText')).toBe(groupMock.full_name);
      });

      it(`uses the selected group's ID as the listbox' and input value`, async () => {
        await createComponentWithGroups();
        await selectGroup();

        expect(findListbox().attributes('selected')).toBe(groupMock.id);
        expect(findInput().attributes('value')).toBe(groupMock.id);
      });

      it(`on reset, falls back to the default toggle text`, async () => {
        await createComponentWithGroups();
        await selectGroup();

        findListbox().vm.$emit('reset');
        await nextTick();

        expect(findListbox().props('toggleText')).toBe(TOGGLE_TEXT);
      });
    });
  });

  describe('search', () => {
    it('sets `searching` to `true` when first opening the dropdown', async () => {
      createComponent();

      expect(findListbox().props('searching')).toBe(false);

      openListbox();
      await nextTick();

      expect(findListbox().props('searching')).toBe(true);
    });

    it('sets `searching` to `true` while searching', async () => {
      await createComponentWithGroups();

      expect(findListbox().props('searching')).toBe(false);

      search('foo');
      await nextTick();

      expect(findListbox().props('searching')).toBe(true);
    });

    it('fetches groups matching the search string', async () => {
      const searchString = 'searchString';
      await createComponentWithGroups();

      expect(mock.history.get).toHaveLength(1);

      search(searchString);
      await waitForPromises();

      expect(mock.history.get).toHaveLength(2);
      expect(mock.history.get[1].params).toStrictEqual({ search: searchString });
    });

    it('shows a notice if the search query is too short', async () => {
      const searchString = 'a';
      await createComponentWithGroups();

      search(searchString);
      await waitForPromises();

      // Query below the minimum length: no request is made, a notice is shown.
      expect(mock.history.get).toHaveLength(1);
      expect(findListbox().props('noResultsText')).toBe(QUERY_TOO_SHORT_MESSAGE);
    });
  });
});

View File

@ -469,32 +469,6 @@ RSpec.describe GroupsHelper do
end
end
describe '#subgroups_and_projects_list_app_data' do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
before do
allow(helper).to receive(:current_user).and_return(user)
allow(helper).to receive(:can?).with(user, :create_subgroup, group) { true }
allow(helper).to receive(:can?).with(user, :create_projects, group) { true }
end
it 'returns expected hash' do
expect(helper.subgroups_and_projects_list_app_data(group)).to match({
show_schema_markup: 'true',
new_subgroup_path: including("groups/new?parent_id=#{group.id}#create-group-pane"),
new_project_path: including("/projects/new?namespace_id=#{group.id}"),
new_subgroup_illustration: including('illustrations/subgroup-create-new-sm'),
new_project_illustration: including('illustrations/project-create-new-sm'),
empty_subgroup_illustration: including('illustrations/empty-state/empty-subgroup-md'),
render_empty_state: 'true',
can_create_subgroups: 'true',
can_create_projects: 'true'
})
end
end
describe '#group_overview_tabs_app_data' do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
@ -515,8 +489,17 @@ RSpec.describe GroupsHelper do
shared_projects_endpoint: including("/groups/#{group.path}/-/shared_projects.json"),
archived_projects_endpoint: including("/groups/#{group.path}/-/children.json?archived=only"),
current_group_visibility: group.visibility,
initial_sort: initial_sort
}.merge(helper.group_overview_tabs_app_data(group))
initial_sort: initial_sort,
show_schema_markup: 'true',
new_subgroup_path: including("groups/new?parent_id=#{group.id}#create-group-pane"),
new_project_path: including("/projects/new?namespace_id=#{group.id}"),
new_subgroup_illustration: including('illustrations/subgroup-create-new-sm'),
new_project_illustration: including('illustrations/project-create-new-sm'),
empty_subgroup_illustration: including('illustrations/empty-state/empty-subgroup-md'),
render_empty_state: 'true',
can_create_subgroups: 'true',
can_create_projects: 'true'
}
)
end
end

View File

@ -10,29 +10,12 @@ RSpec.describe RecaptchaHelper, type: :helper do
end
shared_examples 'Gitlab QA bypass' do
context 'when GITLAB_QA_USER_AGENT env var is present' do
using RSpec::Parameterized::TableSyntax
where(:dot_com, :user_agent, :qa_user_agent, :result) do
false | 'qa_user_agent' | 'qa_user_agent' | true
true | nil | 'qa_user_agent' | true
true | '' | 'qa_user_agent' | true
true | 'qa_user_agent' | '' | true
true | 'qa_user_agent' | nil | true
true | 'qa_user_agent' | 'qa_user_agent' | false
context 'when it is a QA request' do
before do
allow(Gitlab::Qa).to receive(:request?).and_return(true)
end
with_them do
before do
allow(Gitlab).to receive(:com?).and_return(dot_com)
stub_env('GITLAB_QA_USER_AGENT', qa_user_agent)
request_double = instance_double(ActionController::TestRequest, user_agent: user_agent)
allow(helper).to receive(:request).and_return(request_double)
end
it { is_expected.to eq result }
end
it { is_expected.to eq false }
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Qa do
  describe '.request?' do
    using RSpec::Parameterized::TableSyntax

    # request? should only be true on .com when the request's user agent
    # matches the configured GITLAB_QA_USER_AGENT (both present and non-blank).
    where(:dot_com, :request_user_agent, :qa_user_agent, :result) do
      false | 'qa_user_agent' | 'qa_user_agent' | false
      true | nil | 'qa_user_agent' | false
      true | '' | 'qa_user_agent' | false
      true | 'qa_user_agent' | '' | false
      true | 'qa_user_agent' | nil | false
      true | 'qa_user_agent' | 'qa_user_agent' | true
    end

    with_them do
      before do
        allow(Gitlab).to receive(:com?).and_return(dot_com)
        stub_env('GITLAB_QA_USER_AGENT', qa_user_agent)
      end

      subject { described_class.request?(request_user_agent) }

      it { is_expected.to eq(result) }
    end
  end
end

View File

@ -0,0 +1,55 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailCtaClickedMetric do
  using RSpec::Parameterized::TableSyntax

  let(:email_attributes) { { cta_clicked_at: Date.yesterday, track: 'verify', series: 0 } }
  let(:options) { { track: 'verify', series: 0 } }
  # Only the two records created via create_list match track/series AND have
  # cta_clicked_at set, so the metric should count 2.
  let(:expected_value) { 2 }
  let(:expected_query) do
    'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails"' \
    ' WHERE "in_product_marketing_emails"."cta_clicked_at" IS NOT NULL' \
    ' AND "in_product_marketing_emails"."series" = 0'\
    ' AND "in_product_marketing_emails"."track" = 1'
  end

  before do
    # 2 matching records, plus one excluded per filter: unclicked, wrong track, wrong series.
    create_list :in_product_marketing_email, 2, email_attributes

    create :in_product_marketing_email, email_attributes.merge(cta_clicked_at: nil)
    create :in_product_marketing_email, email_attributes.merge(track: 'team')
    create :in_product_marketing_email, email_attributes.merge(series: 1)
  end

  it_behaves_like 'a correct instrumented metric value and query', {
    options: { track: 'verify', series: 0 },
    time_frame: 'all'
  }

  # Option validation cases: one valid and one invalid value per option key.
  where(:options_key, :valid_value, :invalid_value) do
    :track | 'admin_verify' | 'invite_team'
    :series | 1 | 5
  end

  with_them do
    it "raises an exception if option is not present" do
      expect do
        described_class.new(options: options.except(options_key), time_frame: 'all')
      end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
    end

    it "raises an exception if option has invalid value" do
      expect do
        options[options_key] = invalid_value
        described_class.new(options: options, time_frame: 'all')
      end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
    end

    it "doesn't raise exceptions if option has valid value" do
      options[options_key] = valid_value
      described_class.new(options: options, time_frame: 'all')
    end
  end
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Usage::Metrics::Instrumentations::InProductMarketingEmailSentMetric do
  using RSpec::Parameterized::TableSyntax

  let(:email_attributes) { { track: 'verify', series: 0 } }
  # Only the two records created via create_list match the configured
  # track/series pair, so the metric should count 2.
  let(:expected_value) { 2 }
  let(:expected_query) do
    'SELECT COUNT("in_product_marketing_emails"."id") FROM "in_product_marketing_emails"' \
    ' WHERE "in_product_marketing_emails"."series" = 0'\
    ' AND "in_product_marketing_emails"."track" = 1'
  end

  before do
    # 2 matching records, plus one with a wrong track and one with a wrong series.
    create_list :in_product_marketing_email, 2, email_attributes

    create :in_product_marketing_email, email_attributes.merge(track: 'team')
    create :in_product_marketing_email, email_attributes.merge(series: 1)
  end

  it_behaves_like 'a correct instrumented metric value and query', {
    options: { track: 'verify', series: 0 },
    time_frame: 'all'
  }

  # Option validation cases: one valid and one invalid value per option key.
  where(:options_key, :valid_value, :invalid_value) do
    :track | 'admin_verify' | 'invite_team'
    :series | 1 | 5
  end

  with_them do
    it "raises an exception if option is not present" do
      expect do
        described_class.new(options: email_attributes.except(options_key), time_frame: 'all')
      end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
    end

    it "raises an exception if option has invalid value" do
      expect do
        email_attributes[options_key] = invalid_value
        described_class.new(options: email_attributes, time_frame: 'all')
      end.to raise_error(ArgumentError, %r{#{options_key} .* must be one of})
    end

    it "doesn't raise exceptions if option has valid value" do
      email_attributes[options_key] = valid_value
      described_class.new(options: email_attributes, time_frame: 'all')
    end
  end
end

View File

@@ -1178,110 +1178,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
# Usage-ping counters for in-product marketing emails: one
# "<track>_<series>_sent" and one "<track>_<series>_cta_clicked" entry per
# track/series combination known to the campaign.
describe '.email_campaign_counts' do
  subject { described_class.send(:email_campaign_counts) }

  context 'when queries time out' do
    before do
      # Force every relation count to fail so the error-fallback path runs.
      allow_any_instance_of(ActiveRecord::Relation).to receive(:count).and_raise(ActiveRecord::StatementInvalid)
      allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(should_raise_for_dev)
    end

    context 'with should_raise_for_dev? true' do
      let(:should_raise_for_dev) { true }

      it 'raises an error' do
        expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
      end
    end

    context 'with should_raise_for_dev? false' do
      let(:should_raise_for_dev) { false }

      it 'returns -1 for email campaign data' do
        # -1 is the sentinel value used for counters whose query failed.
        expected_data = {
          "in_product_marketing_email_create_0_sent" => -1,
          "in_product_marketing_email_create_0_cta_clicked" => -1,
          "in_product_marketing_email_create_1_sent" => -1,
          "in_product_marketing_email_create_1_cta_clicked" => -1,
          "in_product_marketing_email_create_2_sent" => -1,
          "in_product_marketing_email_create_2_cta_clicked" => -1,
          "in_product_marketing_email_team_short_0_sent" => -1,
          "in_product_marketing_email_team_short_0_cta_clicked" => -1,
          "in_product_marketing_email_trial_short_0_sent" => -1,
          "in_product_marketing_email_trial_short_0_cta_clicked" => -1,
          "in_product_marketing_email_admin_verify_0_sent" => -1,
          "in_product_marketing_email_admin_verify_0_cta_clicked" => -1,
          "in_product_marketing_email_verify_0_sent" => -1,
          "in_product_marketing_email_verify_0_cta_clicked" => -1,
          "in_product_marketing_email_verify_1_sent" => -1,
          "in_product_marketing_email_verify_1_cta_clicked" => -1,
          "in_product_marketing_email_verify_2_sent" => -1,
          "in_product_marketing_email_verify_2_cta_clicked" => -1,
          "in_product_marketing_email_trial_0_sent" => -1,
          "in_product_marketing_email_trial_0_cta_clicked" => -1,
          "in_product_marketing_email_trial_1_sent" => -1,
          "in_product_marketing_email_trial_1_cta_clicked" => -1,
          "in_product_marketing_email_trial_2_sent" => -1,
          "in_product_marketing_email_trial_2_cta_clicked" => -1,
          "in_product_marketing_email_team_0_sent" => -1,
          "in_product_marketing_email_team_0_cta_clicked" => -1,
          "in_product_marketing_email_team_1_sent" => -1,
          "in_product_marketing_email_team_1_cta_clicked" => -1,
          "in_product_marketing_email_team_2_sent" => -1,
          "in_product_marketing_email_team_2_cta_clicked" => -1
        }

        expect(subject).to eq(expected_data)
      end
    end
  end

  context 'when there are entries' do
    before do
      # One clicked create/0 email and one unclicked verify/0 email; every
      # other counter should therefore be zero.
      create(:in_product_marketing_email, track: :create, series: 0, cta_clicked_at: Time.zone.now)
      create(:in_product_marketing_email, track: :verify, series: 0)
    end

    it 'gathers email campaign data' do
      expected_data = {
        "in_product_marketing_email_create_0_sent" => 1,
        "in_product_marketing_email_create_0_cta_clicked" => 1,
        "in_product_marketing_email_create_1_sent" => 0,
        "in_product_marketing_email_create_1_cta_clicked" => 0,
        "in_product_marketing_email_create_2_sent" => 0,
        "in_product_marketing_email_create_2_cta_clicked" => 0,
        "in_product_marketing_email_team_short_0_sent" => 0,
        "in_product_marketing_email_team_short_0_cta_clicked" => 0,
        "in_product_marketing_email_trial_short_0_sent" => 0,
        "in_product_marketing_email_trial_short_0_cta_clicked" => 0,
        "in_product_marketing_email_admin_verify_0_sent" => 0,
        "in_product_marketing_email_admin_verify_0_cta_clicked" => 0,
        "in_product_marketing_email_verify_0_sent" => 1,
        "in_product_marketing_email_verify_0_cta_clicked" => 0,
        "in_product_marketing_email_verify_1_sent" => 0,
        "in_product_marketing_email_verify_1_cta_clicked" => 0,
        "in_product_marketing_email_verify_2_sent" => 0,
        "in_product_marketing_email_verify_2_cta_clicked" => 0,
        "in_product_marketing_email_trial_0_sent" => 0,
        "in_product_marketing_email_trial_0_cta_clicked" => 0,
        "in_product_marketing_email_trial_1_sent" => 0,
        "in_product_marketing_email_trial_1_cta_clicked" => 0,
        "in_product_marketing_email_trial_2_sent" => 0,
        "in_product_marketing_email_trial_2_cta_clicked" => 0,
        "in_product_marketing_email_team_0_sent" => 0,
        "in_product_marketing_email_team_0_cta_clicked" => 0,
        "in_product_marketing_email_team_1_sent" => 0,
        "in_product_marketing_email_team_1_cta_clicked" => 0,
        "in_product_marketing_email_team_2_sent" => 0,
        "in_product_marketing_email_team_2_cta_clicked" => 0
      }

      expect(subject).to eq(expected_data)
    end
  end
end
describe ".with_duration" do
it 'records duration' do
expect(::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator)

View File

@@ -1,92 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Spec for the migration that moves queued Sidekiq jobs from their legacy
# named queues to the destination queues given by the worker/queue mappings.
RSpec.describe MigrateSidekiqQueuedJobs, :clean_gitlab_redis_queues do
  around do |example|
    # Jobs must really be pushed to Redis rather than captured in memory.
    Sidekiq::Testing.disable!(&example)
  end

  describe '#up', :aggregate_failures, :silence_stdout do
    before do
      # Enqueue two jobs on the legacy named queue.
      EmailReceiverWorker.sidekiq_options queue: 'email_receiver'
      EmailReceiverWorker.perform_async('foo')
      EmailReceiverWorker.perform_async('bar')
    end

    after do
      # Restore the worker's default queue so other specs are unaffected.
      EmailReceiverWorker.set_queue
    end

    context 'with worker_queue_mappings mocked' do
      it 'migrates the jobs to the correct destination queue' do
        allow(Gitlab::SidekiqConfig).to receive(:worker_queue_mappings)
          .and_return({ "EmailReceiverWorker" => "default" })

        expect(queue_length('email_receiver')).to eq(2)
        expect(queue_length('default')).to eq(0)

        migrate!

        expect(queue_length('email_receiver')).to eq(0)
        expect(queue_length('default')).to eq(2)

        migrated = list_jobs('default')

        expect(migrated[0]).to include("class" => "EmailReceiverWorker", "args" => ["bar"])
        expect(migrated[1]).to include("class" => "EmailReceiverWorker", "args" => ["foo"])
      end
    end

    context 'without worker_queue_mappings mocked' do
      it 'migration still runs' do
        # Assuming Settings.sidekiq.routing_rules is [] (named queue)
        # If the default Settings.sidekiq.routing_rules or Gitlab::SidekiqConfig.worker_queue_mappings changed,
        # this spec might be failing. We'll have to adjust the migration or this spec.
        expect(queue_length('email_receiver')).to eq(2)
        expect(queue_length('default')).to eq(0)

        migrate!

        # Nothing moved: source and destination queue are identical here.
        expect(queue_length('email_receiver')).to eq(2)
        expect(queue_length('default')).to eq(0)

        remaining = list_jobs('email_receiver')

        expect(remaining[0]).to include("class" => "EmailReceiverWorker", "args" => ["bar"])
        expect(remaining[1]).to include("class" => "EmailReceiverWorker", "args" => ["foo"])
      end
    end

    context 'with illegal JSON payload' do
      let(:job) { '{foo: 1}' }

      before do
        Sidekiq.redis { |redis| redis.lpush("queue:email_receiver", job) }
      end

      it 'logs an error' do
        allow(Gitlab::SidekiqConfig).to receive(:worker_queue_mappings)
          .and_return({ "EmailReceiverWorker" => "default" })
        allow(::Gitlab::BackgroundMigration::Logger).to receive(:build).and_return(Logger.new($stdout))

        migrate!

        expect($stdout.string).to include("Unmarshal JSON payload from SidekiqMigrateJobs failed. Job: #{job}")
      end
    end

    context 'when run in GitLab.com' do
      it 'skips the migration' do
        allow(Gitlab).to receive(:com?).and_return(true)
        expect(described_class::SidekiqMigrateJobs).not_to receive(:new)

        migrate!
      end
    end

    # Current length of the named Sidekiq queue in Redis.
    def queue_length(queue_name)
      Sidekiq.redis { |redis| redis.llen("queue:#{queue_name}") }
    end

    # All payloads on the named queue, decoded from JSON, head of list first.
    def list_jobs(queue_name)
      raw = Sidekiq.redis do |redis|
        redis.lrange("queue:#{queue_name}", 0, -1)
      end
      raw.map { |payload| Sidekiq.load_json(payload) }
    end
  end
end

View File

@@ -4229,14 +4229,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
transition!
end
# With the feature flag disabled, merge-status transitions must not publish
# the mergeRequestMergeStatusUpdated GraphQL subscription event.
context 'when trigger_mr_subscription_on_merge_status_change is disabled' do
  before do
    stub_feature_flags(trigger_mr_subscription_on_merge_status_change: false)
  end

  it_behaves_like 'transition not triggering mergeRequestMergeStatusUpdated GraphQL subscription'
end
end
shared_examples 'for an invalid state transition' do

Some files were not shown because too many files have changed in this diff Show More