diff --git a/GITLAB_KAS_VERSION b/GITLAB_KAS_VERSION
index d169b2f2d0a..4daad9a5c1d 100644
--- a/GITLAB_KAS_VERSION
+++ b/GITLAB_KAS_VERSION
@@ -1 +1 @@
-0.0.8
+13.6.1
diff --git a/app/assets/javascripts/diffs/components/app.vue b/app/assets/javascripts/diffs/components/app.vue
index a800cc8edc8..9d8d184a3f6 100644
--- a/app/assets/javascripts/diffs/components/app.vue
+++ b/app/assets/javascripts/diffs/components/app.vue
@@ -20,6 +20,8 @@ import HiddenFilesWarning from './hidden_files_warning.vue';
import MergeConflictWarning from './merge_conflict_warning.vue';
import CollapsedFilesWarning from './collapsed_files_warning.vue';
+import { diffsApp } from '../utils/performance';
+
import {
TREE_LIST_WIDTH_STORAGE_KEY,
INITIAL_TREE_WIDTH,
@@ -272,8 +274,12 @@ export default {
);
}
},
+ beforeCreate() {
+ diffsApp.instrument();
+ },
created() {
this.adjustView();
+
eventHub.$once('fetchDiffData', this.fetchData);
eventHub.$on('refetchDiffData', this.refetchDiffData);
this.CENTERED_LIMITED_CONTAINER_CLASSES = CENTERED_LIMITED_CONTAINER_CLASSES;
@@ -294,6 +300,8 @@ export default {
);
},
beforeDestroy() {
+ diffsApp.deinstrument();
+
eventHub.$off('fetchDiffData', this.fetchData);
eventHub.$off('refetchDiffData', this.refetchDiffData);
this.removeEventListeners();
@@ -487,9 +495,11 @@ export default {
{
+ eventHub.$emit(event);
+ });
+ },
handleToggle() {
const currentCollapsedFlag = this.isCollapsed;
@@ -197,7 +231,8 @@ export default {
})
.then(() => {
requestIdleCallback(
- () => {
+ async () => {
+ await this.postRender();
this.assignDiscussionsToDiff(this.getDiffFileDiscussions(this.file));
},
{ timeout: 1000 },
diff --git a/app/assets/javascripts/diffs/constants.js b/app/assets/javascripts/diffs/constants.js
index 709bfe693e6..79f8c08e389 100644
--- a/app/assets/javascripts/diffs/constants.js
+++ b/app/assets/javascripts/diffs/constants.js
@@ -98,3 +98,8 @@ export const RENAMED_DIFF_TRANSITIONS = {
// MR Diffs known events
export const EVT_EXPAND_ALL_FILES = 'mr:diffs:expandAllFiles';
+export const EVT_PERF_MARK_FILE_TREE_START = 'mr:diffs:perf:fileTreeStart';
+export const EVT_PERF_MARK_FILE_TREE_END = 'mr:diffs:perf:fileTreeEnd';
+export const EVT_PERF_MARK_DIFF_FILES_START = 'mr:diffs:perf:filesStart';
+export const EVT_PERF_MARK_FIRST_DIFF_FILE_SHOWN = 'mr:diffs:perf:firstFileShown';
+export const EVT_PERF_MARK_DIFF_FILES_END = 'mr:diffs:perf:filesEnd';
diff --git a/app/assets/javascripts/diffs/store/actions.js b/app/assets/javascripts/diffs/store/actions.js
index 5a3d836a158..72b99ca8486 100644
--- a/app/assets/javascripts/diffs/store/actions.js
+++ b/app/assets/javascripts/diffs/store/actions.js
@@ -8,7 +8,8 @@ import { __, s__ } from '~/locale';
import { handleLocationHash, historyPushState, scrollToElement } from '~/lib/utils/common_utils';
import { mergeUrlParams, getLocationHash } from '~/lib/utils/url_utility';
import TreeWorker from '../workers/tree_worker';
-import eventHub from '../../notes/event_hub';
+import notesEventHub from '../../notes/event_hub';
+import eventHub from '../event_hub';
import {
getDiffPositionByLineCode,
getNoteFormData,
@@ -42,6 +43,9 @@ import {
NO_SHOW_WHITESPACE,
DIFF_FILE_MANUAL_COLLAPSE,
DIFF_FILE_AUTOMATIC_COLLAPSE,
+ EVT_PERF_MARK_FILE_TREE_START,
+ EVT_PERF_MARK_FILE_TREE_END,
+ EVT_PERF_MARK_DIFF_FILES_START,
} from '../constants';
import { diffViewerModes } from '~/ide/constants';
import { isCollapsed } from '../diff_file';
@@ -78,6 +82,7 @@ export const fetchDiffFilesBatch = ({ commit, state, dispatch }) => {
commit(types.SET_BATCH_LOADING, true);
commit(types.SET_RETRIEVING_BATCHES, true);
+ eventHub.$emit(EVT_PERF_MARK_DIFF_FILES_START);
const getBatch = (page = 1) =>
axios
@@ -139,9 +144,11 @@ export const fetchDiffFilesMeta = ({ commit, state }) => {
};
commit(types.SET_LOADING, true);
+ eventHub.$emit(EVT_PERF_MARK_FILE_TREE_START);
worker.addEventListener('message', ({ data }) => {
commit(types.SET_TREE_DATA, data);
+ eventHub.$emit(EVT_PERF_MARK_FILE_TREE_END);
worker.terminate();
});
@@ -215,7 +222,7 @@ export const assignDiscussionsToDiff = (
}
Vue.nextTick(() => {
- eventHub.$emit('scrollToDiscussion');
+ notesEventHub.$emit('scrollToDiscussion');
});
};
@@ -240,7 +247,7 @@ export const renderFileForDiscussionId = ({ commit, rootState, state }, discussi
}
if (file.viewer.automaticallyCollapsed) {
- eventHub.$emit(`loadCollapsedDiff/${file.file_hash}`);
+ notesEventHub.$emit(`loadCollapsedDiff/${file.file_hash}`);
scrollToElement(document.getElementById(file.file_hash));
} else if (file.viewer.manuallyCollapsed) {
commit(types.SET_FILE_COLLAPSED, {
@@ -248,9 +255,9 @@ export const renderFileForDiscussionId = ({ commit, rootState, state }, discussi
collapsed: false,
trigger: DIFF_FILE_AUTOMATIC_COLLAPSE,
});
- eventHub.$emit('scrollToDiscussion');
+ notesEventHub.$emit('scrollToDiscussion');
} else {
- eventHub.$emit('scrollToDiscussion');
+ notesEventHub.$emit('scrollToDiscussion');
}
}
}
@@ -485,7 +492,7 @@ export const setShowWhitespace = ({ commit }, { showWhitespace, pushState = fals
historyPushState(mergeUrlParams({ w }, window.location.href));
}
- eventHub.$emit('refetchDiffData');
+ notesEventHub.$emit('refetchDiffData');
};
export const toggleFileFinder = ({ commit }, visible) => {
diff --git a/app/assets/javascripts/diffs/utils/performance.js b/app/assets/javascripts/diffs/utils/performance.js
new file mode 100644
index 00000000000..dcde6f4ecc4
--- /dev/null
+++ b/app/assets/javascripts/diffs/utils/performance.js
@@ -0,0 +1,80 @@
+import { performanceMarkAndMeasure } from '~/performance/utils';
+import {
+ MR_DIFFS_MARK_FILE_TREE_START,
+ MR_DIFFS_MARK_FILE_TREE_END,
+ MR_DIFFS_MARK_DIFF_FILES_START,
+ MR_DIFFS_MARK_FIRST_DIFF_FILE_SHOWN,
+ MR_DIFFS_MARK_DIFF_FILES_END,
+ MR_DIFFS_MEASURE_FILE_TREE_DONE,
+ MR_DIFFS_MEASURE_DIFF_FILES_DONE,
+} from '../../performance/constants';
+
+import eventHub from '../event_hub';
+import {
+ EVT_PERF_MARK_FILE_TREE_START,
+ EVT_PERF_MARK_FILE_TREE_END,
+ EVT_PERF_MARK_DIFF_FILES_START,
+ EVT_PERF_MARK_FIRST_DIFF_FILE_SHOWN,
+ EVT_PERF_MARK_DIFF_FILES_END,
+} from '../constants';
+
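+// Each handler below records a performance mark via performanceMarkAndMeasure;
+// the *End handlers additionally record a measure spanning from the matching
+// start mark to the end mark.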
+function treeStart() {
+ performanceMarkAndMeasure({
+ mark: MR_DIFFS_MARK_FILE_TREE_START,
+ });
+}
+
+function treeEnd() {
+ performanceMarkAndMeasure({
+ mark: MR_DIFFS_MARK_FILE_TREE_END,
+ measures: [
+ {
+ name: MR_DIFFS_MEASURE_FILE_TREE_DONE,
+ start: MR_DIFFS_MARK_FILE_TREE_START,
+ end: MR_DIFFS_MARK_FILE_TREE_END,
+ },
+ ],
+ });
+}
+
+function filesStart() {
+ performanceMarkAndMeasure({
+ mark: MR_DIFFS_MARK_DIFF_FILES_START,
+ });
+}
+
+function filesEnd() {
+ performanceMarkAndMeasure({
+ mark: MR_DIFFS_MARK_DIFF_FILES_END,
+ measures: [
+ {
+ name: MR_DIFFS_MEASURE_DIFF_FILES_DONE,
+ start: MR_DIFFS_MARK_DIFF_FILES_START,
+ end: MR_DIFFS_MARK_DIFF_FILES_END,
+ },
+ ],
+ });
+}
+
+function firstFile() {
+ performanceMarkAndMeasure({
+ mark: MR_DIFFS_MARK_FIRST_DIFF_FILE_SHOWN,
+ });
+}
+
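+// Wires the diffs event hub to the handlers above. The diffs app calls
+// instrument() in its beforeCreate hook and deinstrument() in beforeDestroy,
+// so the listeners do not leak across app instances.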
+export const diffsApp = {
+ instrument() {
+ eventHub.$on(EVT_PERF_MARK_FILE_TREE_START, treeStart);
+ eventHub.$on(EVT_PERF_MARK_FILE_TREE_END, treeEnd);
+ eventHub.$on(EVT_PERF_MARK_DIFF_FILES_START, filesStart);
+ eventHub.$on(EVT_PERF_MARK_DIFF_FILES_END, filesEnd);
+ eventHub.$on(EVT_PERF_MARK_FIRST_DIFF_FILE_SHOWN, firstFile);
+ },
+ deinstrument() {
+ eventHub.$off(EVT_PERF_MARK_FIRST_DIFF_FILE_SHOWN, firstFile);
+ eventHub.$off(EVT_PERF_MARK_DIFF_FILES_END, filesEnd);
+ eventHub.$off(EVT_PERF_MARK_DIFF_FILES_START, filesStart);
+ eventHub.$off(EVT_PERF_MARK_FILE_TREE_END, treeEnd);
+ eventHub.$off(EVT_PERF_MARK_FILE_TREE_START, treeStart);
+ },
+};
diff --git a/app/assets/javascripts/performance/constants.js b/app/assets/javascripts/performance/constants.js
index 6b6b6f1da40..816eb9b3a66 100644
--- a/app/assets/javascripts/performance/constants.js
+++ b/app/assets/javascripts/performance/constants.js
@@ -29,3 +29,17 @@ export const WEBIDE_MARK_FILE_FINISH = 'webide-file-finished';
export const WEBIDE_MEASURE_TREE_FROM_REQUEST = 'webide-tree-loading-from-request';
export const WEBIDE_MEASURE_FILE_FROM_REQUEST = 'webide-file-loading-from-request';
export const WEBIDE_MEASURE_FILE_AFTER_INTERACTION = 'webide-file-loading-after-interaction';
+
+//
+// MR Diffs namespace
+
+// Marks
+export const MR_DIFFS_MARK_FILE_TREE_START = 'mr-diffs-mark-file-tree-start';
+export const MR_DIFFS_MARK_FILE_TREE_END = 'mr-diffs-mark-file-tree-end';
+export const MR_DIFFS_MARK_DIFF_FILES_START = 'mr-diffs-mark-diff-files-start';
+export const MR_DIFFS_MARK_FIRST_DIFF_FILE_SHOWN = 'mr-diffs-mark-first-diff-file-shown';
+export const MR_DIFFS_MARK_DIFF_FILES_END = 'mr-diffs-mark-diff-files-end';
+
+// Measures
+export const MR_DIFFS_MEASURE_FILE_TREE_DONE = 'mr-diffs-measure-file-tree-done';
+export const MR_DIFFS_MEASURE_DIFF_FILES_DONE = 'mr-diffs-measure-diff-files-done';
diff --git a/app/assets/javascripts/search/dropdown_filter/components/dropdown_filter.vue b/app/assets/javascripts/search/dropdown_filter/components/dropdown_filter.vue
new file mode 100644
index 00000000000..08619fa2066
--- /dev/null
+++ b/app/assets/javascripts/search/dropdown_filter/components/dropdown_filter.vue
@@ -0,0 +1,108 @@
+
+
+
+
+
+ {{ filterData.header }}
+
+
+
+ {{ f.label }}
+
+
+
diff --git a/app/assets/javascripts/search/dropdown_filter/constants/confidential_filter_data.js b/app/assets/javascripts/search/dropdown_filter/constants/confidential_filter_data.js
new file mode 100644
index 00000000000..b29daca89cb
--- /dev/null
+++ b/app/assets/javascripts/search/dropdown_filter/constants/confidential_filter_data.js
@@ -0,0 +1,36 @@
+import { __ } from '~/locale';
+
+const header = __('Confidentiality');
+
+const filters = {
+ ANY: {
+ label: __('Any'),
+ value: null,
+ },
+ CONFIDENTIAL: {
+ label: __('Confidential'),
+ value: 'yes',
+ },
+ NOT_CONFIDENTIAL: {
+ label: __('Not confidential'),
+ value: 'no',
+ },
+};
+
+const scopes = {
+ ISSUES: 'issues',
+};
+
+const filterByScope = {
+ [scopes.ISSUES]: [filters.ANY, filters.CONFIDENTIAL, filters.NOT_CONFIDENTIAL],
+};
+
+const filterParam = 'confidential';
+
+export default {
+ header,
+ filters,
+ scopes,
+ filterByScope,
+ filterParam,
+};
diff --git a/app/assets/javascripts/search/dropdown_filter/constants/state_filter_data.js b/app/assets/javascripts/search/dropdown_filter/constants/state_filter_data.js
new file mode 100644
index 00000000000..0b93aa0be29
--- /dev/null
+++ b/app/assets/javascripts/search/dropdown_filter/constants/state_filter_data.js
@@ -0,0 +1,42 @@
+import { __ } from '~/locale';
+
+const header = __('Status');
+
+const filters = {
+ ANY: {
+ label: __('Any'),
+ value: 'all',
+ },
+ OPEN: {
+ label: __('Open'),
+ value: 'opened',
+ },
+ CLOSED: {
+ label: __('Closed'),
+ value: 'closed',
+ },
+ MERGED: {
+ label: __('Merged'),
+ value: 'merged',
+ },
+};
+
+const scopes = {
+ ISSUES: 'issues',
+ MERGE_REQUESTS: 'merge_requests',
+};
+
+const filterByScope = {
+ [scopes.ISSUES]: [filters.ANY, filters.OPEN, filters.CLOSED],
+ [scopes.MERGE_REQUESTS]: [filters.ANY, filters.OPEN, filters.MERGED, filters.CLOSED],
+};
+
+const filterParam = 'state';
+
+export default {
+ header,
+ filters,
+ scopes,
+ filterByScope,
+ filterParam,
+};
diff --git a/app/assets/javascripts/search/dropdown_filter/index.js b/app/assets/javascripts/search/dropdown_filter/index.js
new file mode 100644
index 00000000000..e5e0745d990
--- /dev/null
+++ b/app/assets/javascripts/search/dropdown_filter/index.js
@@ -0,0 +1,38 @@
+import Vue from 'vue';
+import Translate from '~/vue_shared/translate';
+import DropdownFilter from './components/dropdown_filter.vue';
+import stateFilterData from './constants/state_filter_data';
+import confidentialFilterData from './constants/confidential_filter_data';
+
+Vue.use(Translate);
+
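+// Mounts a DropdownFilter onto the element with the given id, if it exists on
+// the page; returns false when the mount point is absent.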
+const mountDropdownFilter = (store, { id, filterData }) => {
+ const el = document.getElementById(id);
+
+ if (!el) return false;
+
+ return new Vue({
+ el,
+ store,
+ render(createElement) {
+ return createElement(DropdownFilter, {
+ props: {
+ filterData,
+ },
+ });
+ },
+ });
+};
+
+const dropdownFilters = [
+ {
+ id: 'js-search-filter-by-state',
+ filterData: stateFilterData,
+ },
+ {
+ id: 'js-search-filter-by-confidential',
+ filterData: confidentialFilterData,
+ },
+];
+
+export default store => [...dropdownFilters].map(filter => mountDropdownFilter(store, filter));
diff --git a/app/assets/javascripts/search/index.js b/app/assets/javascripts/search/index.js
index 7508b3c9a55..275d6351adc 100644
--- a/app/assets/javascripts/search/index.js
+++ b/app/assets/javascripts/search/index.js
@@ -1,11 +1,17 @@
import { queryToObject } from '~/lib/utils/url_utility';
import createStore from './store';
+import initDropdownFilters from './dropdown_filter';
import { initSidebar } from './sidebar';
import initGroupFilter from './group_filter';
export default () => {
const store = createStore({ query: queryToObject(window.location.search) });
- initSidebar(store);
+ if (gon.features.searchFacets) {
+ initSidebar(store);
+ } else {
+ initDropdownFilters(store);
+ }
+
initGroupFilter(store);
};
diff --git a/app/assets/javascripts/search/sidebar/components/app.vue b/app/assets/javascripts/search/sidebar/components/app.vue
deleted file mode 100644
index 0c50f93d381..00000000000
--- a/app/assets/javascripts/search/sidebar/components/app.vue
+++ /dev/null
@@ -1,41 +0,0 @@
-
-
-
-
-
diff --git a/app/assets/javascripts/search/sidebar/components/confidentiality_filter.vue b/app/assets/javascripts/search/sidebar/components/confidentiality_filter.vue
index 38dccb9675d..f8938e799aa 100644
--- a/app/assets/javascripts/search/sidebar/components/confidentiality_filter.vue
+++ b/app/assets/javascripts/search/sidebar/components/confidentiality_filter.vue
@@ -21,6 +21,5 @@ export default {
-
diff --git a/app/assets/javascripts/search/sidebar/components/status_filter.vue b/app/assets/javascripts/search/sidebar/components/status_filter.vue
index 5cec2090906..876123ccc52 100644
--- a/app/assets/javascripts/search/sidebar/components/status_filter.vue
+++ b/app/assets/javascripts/search/sidebar/components/status_filter.vue
@@ -21,6 +21,5 @@ export default {
-
diff --git a/app/assets/javascripts/search/sidebar/index.js b/app/assets/javascripts/search/sidebar/index.js
index 6419e8ac2c6..b19016edf3d 100644
--- a/app/assets/javascripts/search/sidebar/index.js
+++ b/app/assets/javascripts/search/sidebar/index.js
@@ -1,11 +1,12 @@
import Vue from 'vue';
import Translate from '~/vue_shared/translate';
-import GlobalSearchSidebar from './components/app.vue';
+import StatusFilter from './components/status_filter.vue';
+import ConfidentialityFilter from './components/confidentiality_filter.vue';
Vue.use(Translate);
-export const initSidebar = store => {
- const el = document.getElementById('js-search-sidebar');
+const mountRadioFilters = (store, { id, component }) => {
+ const el = document.getElementById(id);
if (!el) return false;
@@ -13,7 +14,21 @@ export const initSidebar = store => {
el,
store,
render(createElement) {
- return createElement(GlobalSearchSidebar);
+ return createElement(component);
},
});
};
+
+const radioFilters = [
+ {
+ id: 'js-search-filter-by-state',
+ component: StatusFilter,
+ },
+ {
+ id: 'js-search-filter-by-confidential',
+ component: ConfidentialityFilter,
+ },
+];
+
+export const initSidebar = store =>
+ [...radioFilters].map(filter => mountRadioFilters(store, filter));
diff --git a/app/assets/javascripts/search/store/actions.js b/app/assets/javascripts/search/store/actions.js
index 447278aa223..722ed2eec26 100644
--- a/app/assets/javascripts/search/store/actions.js
+++ b/app/assets/javascripts/search/store/actions.js
@@ -1,7 +1,6 @@
import Api from '~/api';
import createFlash from '~/flash';
import { __ } from '~/locale';
-import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import * as types from './mutation_types';
export const fetchGroups = ({ commit }, search) => {
@@ -19,11 +18,3 @@ export const fetchGroups = ({ commit }, search) => {
export const setQuery = ({ commit }, { key, value }) => {
commit(types.SET_QUERY, { key, value });
};
-
-export const applyQuery = ({ state }) => {
- visitUrl(setUrlParams({ ...state.query, page: null }));
-};
-
-export const resetQuery = ({ state }) => {
- visitUrl(setUrlParams({ ...state.query, page: null, state: null, confidential: null }));
-};
diff --git a/app/controllers/concerns/lfs_request.rb b/app/controllers/concerns/lfs_request.rb
index 2844acea271..bc3fd32759f 100644
--- a/app/controllers/concerns/lfs_request.rb
+++ b/app/controllers/concerns/lfs_request.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
# This concern assumes:
+# - a `#container` accessor
# - a `#project` accessor
# - a `#user` accessor
# - a `#authentication_result` accessor
@@ -11,6 +12,7 @@
# - a `#has_authentication_ability?(ability)` method
module LfsRequest
extend ActiveSupport::Concern
+ include Gitlab::Utils::StrongMemoize
CONTENT_TYPE = 'application/vnd.git-lfs+json'
@@ -29,16 +31,19 @@ module LfsRequest
message: _('Git LFS is not enabled on this GitLab server, contact your admin.'),
documentation_url: help_url
},
+ content_type: CONTENT_TYPE,
status: :not_implemented
)
end
def lfs_check_access!
- return render_lfs_not_found unless project
+ return render_lfs_not_found unless container&.lfs_enabled?
return if download_request? && lfs_download_access?
return if upload_request? && lfs_upload_access?
- if project.public? || can?(user, :read_project, project)
+ # Only return a 403 response if the user has download access permission,
+ # otherwise return a 404 to avoid exposing the existence of the container.
+ if lfs_download_access?
lfs_forbidden!
else
render_lfs_not_found
@@ -72,9 +77,9 @@ module LfsRequest
end
def lfs_download_access?
- return false unless project.lfs_enabled?
-
- ci? || lfs_deploy_token? || user_can_download_code? || build_can_download_code? || deploy_token_can_download_code?
+ strong_memoize(:lfs_download_access) do
+ ci? || lfs_deploy_token? || user_can_download_code? || build_can_download_code? || deploy_token_can_download_code?
+ end
end
def deploy_token_can_download_code?
@@ -93,11 +98,12 @@ module LfsRequest
end
def lfs_upload_access?
- return false unless project.lfs_enabled?
- return false unless has_authentication_ability?(:push_code)
- return false if limit_exceeded?
+ strong_memoize(:lfs_upload_access) do
+ next false unless has_authentication_ability?(:push_code)
+ next false if limit_exceeded?
- lfs_deploy_token? || can?(user, :push_code, project)
+ lfs_deploy_token? || can?(user, :push_code, project)
+ end
end
def lfs_deploy_token?
diff --git a/app/controllers/groups/boards_controller.rb b/app/controllers/groups/boards_controller.rb
index c9333bab69b..c2d72610c66 100644
--- a/app/controllers/groups/boards_controller.rb
+++ b/app/controllers/groups/boards_controller.rb
@@ -8,7 +8,7 @@ class Groups::BoardsController < Groups::ApplicationController
before_action :assign_endpoint_vars
before_action do
push_frontend_feature_flag(:graphql_board_lists, group, default_enabled: false)
- push_frontend_feature_flag(:boards_with_swimlanes, group, default_enabled: false)
+ push_frontend_feature_flag(:boards_with_swimlanes, group, default_enabled: true)
end
feature_category :boards
diff --git a/app/controllers/projects/boards_controller.rb b/app/controllers/projects/boards_controller.rb
index 87207e30c1f..fe4502a0e06 100644
--- a/app/controllers/projects/boards_controller.rb
+++ b/app/controllers/projects/boards_controller.rb
@@ -8,7 +8,7 @@ class Projects::BoardsController < Projects::ApplicationController
before_action :authorize_read_board!, only: [:index, :show]
before_action :assign_endpoint_vars
before_action do
- push_frontend_feature_flag(:boards_with_swimlanes, project, default_enabled: false)
+ push_frontend_feature_flag(:boards_with_swimlanes, project, default_enabled: true)
end
feature_category :boards
diff --git a/app/controllers/projects/settings/ci_cd_controller.rb b/app/controllers/projects/settings/ci_cd_controller.rb
index 8ac83edef8d..f76278a12a4 100644
--- a/app/controllers/projects/settings/ci_cd_controller.rb
+++ b/app/controllers/projects/settings/ci_cd_controller.rb
@@ -5,6 +5,8 @@ module Projects
class CiCdController < Projects::ApplicationController
include RunnerSetupScripts
+ NUMBER_OF_RUNNERS_PER_PAGE = 20
+
before_action :authorize_admin_pipeline!
before_action :define_variables
before_action do
@@ -108,13 +110,13 @@ module Projects
end
def define_runners_variables
- @project_runners = @project.runners.ordered
+ @project_runners = @project.runners.ordered.page(params[:project_page]).per(NUMBER_OF_RUNNERS_PER_PAGE).with_tags
@assignable_runners = current_user
.ci_owned_runners
.assignable_for(project)
.ordered
- .page(params[:page]).per(20)
+ .page(params[:specific_page]).per(NUMBER_OF_RUNNERS_PER_PAGE)
@shared_runners = ::Ci::Runner.instance_type.active
diff --git a/app/controllers/repositories/git_http_client_controller.rb b/app/controllers/repositories/git_http_client_controller.rb
index de452aa69b7..ec854bd0dde 100644
--- a/app/controllers/repositories/git_http_client_controller.rb
+++ b/app/controllers/repositories/git_http_client_controller.rb
@@ -6,7 +6,7 @@ module Repositories
include KerberosSpnegoHelper
include Gitlab::Utils::StrongMemoize
- attr_reader :authentication_result, :redirected_path, :container
+ attr_reader :authentication_result, :redirected_path
delegate :actor, :authentication_abilities, to: :authentication_result, allow_nil: true
delegate :type, to: :authentication_result, allow_nil: true, prefix: :auth_result
@@ -75,6 +75,12 @@ module Repositories
headers['Www-Authenticate'] = challenges.join("\n") if challenges.any?
end
+ def container
+ parse_repo_path unless defined?(@container)
+
+ @container
+ end
+
def project
parse_repo_path unless defined?(@project)
diff --git a/app/controllers/repositories/lfs_api_controller.rb b/app/controllers/repositories/lfs_api_controller.rb
index 35751a2578f..96185608c09 100644
--- a/app/controllers/repositories/lfs_api_controller.rb
+++ b/app/controllers/repositories/lfs_api_controller.rb
@@ -17,9 +17,9 @@ module Repositories
end
if download_request?
- render json: { objects: download_objects! }
+ render json: { objects: download_objects! }, content_type: LfsRequest::CONTENT_TYPE
elsif upload_request?
- render json: { objects: upload_objects! }
+ render json: { objects: upload_objects! }, content_type: LfsRequest::CONTENT_TYPE
else
raise "Never reached"
end
@@ -31,6 +31,7 @@ module Repositories
message: _('Server supports batch API only, please update your Git LFS client to version 1.0.1 and up.'),
documentation_url: "#{Gitlab.config.gitlab.url}/help"
},
+ content_type: LfsRequest::CONTENT_TYPE,
status: :not_implemented
)
end
diff --git a/app/controllers/repositories/lfs_storage_controller.rb b/app/controllers/repositories/lfs_storage_controller.rb
index 64a634b2cc2..48784842d48 100644
--- a/app/controllers/repositories/lfs_storage_controller.rb
+++ b/app/controllers/repositories/lfs_storage_controller.rb
@@ -29,7 +29,7 @@ module Repositories
def upload_finalize
if store_file!(oid, size)
- head 200
+ head 200, content_type: LfsRequest::CONTENT_TYPE
else
render plain: 'Unprocessable entity', status: :unprocessable_entity
end
diff --git a/app/controllers/search_controller.rb b/app/controllers/search_controller.rb
index 4b21edc98d5..0f149c24a59 100644
--- a/app/controllers/search_controller.rb
+++ b/app/controllers/search_controller.rb
@@ -24,6 +24,10 @@ class SearchController < ApplicationController
search_term_present && !params[:project_id].present?
end
+ before_action do
+ push_frontend_feature_flag(:search_facets)
+ end
+
layout 'search'
feature_category :global_search
diff --git a/app/graphql/mutations/releases/base.rb b/app/graphql/mutations/releases/base.rb
new file mode 100644
index 00000000000..d53cfbe6a11
--- /dev/null
+++ b/app/graphql/mutations/releases/base.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Mutations
+ module Releases
+ class Base < BaseMutation
+ include ResolvesProject
+
+ argument :project_path, GraphQL::ID_TYPE,
+ required: true,
+ description: 'Full path of the project the release is associated with'
+
+ private
+
+ def find_object(full_path:)
+ resolve_project(full_path: full_path)
+ end
+ end
+ end
+end
diff --git a/app/graphql/mutations/releases/create.rb b/app/graphql/mutations/releases/create.rb
new file mode 100644
index 00000000000..57c1541c368
--- /dev/null
+++ b/app/graphql/mutations/releases/create.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module Mutations
+ module Releases
+ class Create < Base
+ graphql_name 'ReleaseCreate'
+
+ field :release,
+ Types::ReleaseType,
+ null: true,
+ description: 'The release after mutation'
+
+ argument :tag_name, GraphQL::STRING_TYPE,
+ required: true, as: :tag,
+ description: 'Name of the tag to associate with the release'
+
+ argument :ref, GraphQL::STRING_TYPE,
+ required: false,
+ description: 'The commit SHA or branch name to use if creating a new tag'
+
+ argument :name, GraphQL::STRING_TYPE,
+ required: false,
+ description: 'Name of the release'
+
+ argument :description, GraphQL::STRING_TYPE,
+ required: false,
+ description: 'Description (also known as "release notes") of the release'
+
+ argument :released_at, Types::TimeType,
+ required: false,
+ description: 'The date when the release will be/was ready. Defaults to the current time.'
+
+ argument :milestones, [GraphQL::STRING_TYPE],
+ required: false,
+ description: 'The title of each milestone the release is associated with. GitLab Premium customers can specify group milestones.'
+
+ argument :assets, Types::ReleaseAssetsInputType,
+ required: false,
+ description: 'Assets associated to the release'
+
+ authorize :create_release
+
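+      # Runs ::Releases::CreateService and maps its result onto the mutation
+      # payload (release plus errors).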
+ def resolve(project_path:, milestones: nil, assets: nil, **scalars)
+ project = authorized_find!(full_path: project_path)
+
+ params = {
+ **scalars,
+ milestones: milestones.presence || [],
+ assets: assets.to_h
+ }.with_indifferent_access
+
+ result = ::Releases::CreateService.new(project, current_user, params).execute
+
+ if result[:status] == :success
+ {
+ release: result[:release],
+ errors: []
+ }
+ else
+ {
+ release: nil,
+ errors: [result[:message]]
+ }
+ end
+ end
+ end
+ end
+end
diff --git a/app/graphql/resolvers/metadata_resolver.rb b/app/graphql/resolvers/metadata_resolver.rb
index 3a79e6434fb..26bfa81038c 100644
--- a/app/graphql/resolvers/metadata_resolver.rb
+++ b/app/graphql/resolvers/metadata_resolver.rb
@@ -5,7 +5,7 @@ module Resolvers
type Types::MetadataType, null: false
def resolve(**args)
- { version: Gitlab::VERSION, revision: Gitlab.revision }
+ ::InstanceMetadata.new
end
end
end
diff --git a/app/graphql/types/mutation_type.rb b/app/graphql/types/mutation_type.rb
index 1c5202f428c..c2262fdd0e3 100644
--- a/app/graphql/types/mutation_type.rb
+++ b/app/graphql/types/mutation_type.rb
@@ -63,6 +63,7 @@ module Types
'destroyed during the update, and no Note will be returned'
mount_mutation Mutations::Notes::RepositionImageDiffNote
mount_mutation Mutations::Notes::Destroy
+ mount_mutation Mutations::Releases::Create
mount_mutation Mutations::Terraform::State::Delete
mount_mutation Mutations::Terraform::State::Lock
mount_mutation Mutations::Terraform::State::Unlock
diff --git a/app/graphql/types/release_asset_link_input_type.rb b/app/graphql/types/release_asset_link_input_type.rb
new file mode 100644
index 00000000000..d13861fad8f
--- /dev/null
+++ b/app/graphql/types/release_asset_link_input_type.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Types
+ # rubocop: disable Graphql/AuthorizeTypes
+ class ReleaseAssetLinkInputType < BaseInputObject
+ graphql_name 'ReleaseAssetLinkInput'
+ description 'Fields that are available when modifying a release asset link'
+
+ argument :name, GraphQL::STRING_TYPE,
+ required: true,
+ description: 'Name of the asset link'
+
+ argument :url, GraphQL::STRING_TYPE,
+ required: true,
+ description: 'URL of the asset link'
+
+ argument :direct_asset_path, GraphQL::STRING_TYPE,
+ required: false, as: :filepath,
+ description: 'Relative path for a direct asset link'
+
+ argument :link_type, Types::ReleaseAssetLinkTypeEnum,
+ required: false, default_value: 'other',
+ description: 'The type of the asset link'
+ end
+end
diff --git a/app/graphql/types/release_asset_link_type_enum.rb b/app/graphql/types/release_asset_link_type_enum.rb
index 01862ada56d..70601b9f8da 100644
--- a/app/graphql/types/release_asset_link_type_enum.rb
+++ b/app/graphql/types/release_asset_link_type_enum.rb
@@ -3,7 +3,7 @@
module Types
class ReleaseAssetLinkTypeEnum < BaseEnum
graphql_name 'ReleaseAssetLinkType'
- description 'Type of the link: `other`, `runbook`, `image`, `package`; defaults to `other`'
+ description 'Type of the link: `other`, `runbook`, `image`, `package`'
::Releases::Link.link_types.keys.each do |link_type|
value link_type.upcase, value: link_type, description: "#{link_type.titleize} link type"
diff --git a/app/graphql/types/release_assets_input_type.rb b/app/graphql/types/release_assets_input_type.rb
new file mode 100644
index 00000000000..3fcb517e044
--- /dev/null
+++ b/app/graphql/types/release_assets_input_type.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Types
+ # rubocop: disable Graphql/AuthorizeTypes
+ class ReleaseAssetsInputType < BaseInputObject
+ graphql_name 'ReleaseAssetsInput'
+ description 'Fields that are available when modifying release assets'
+
+ argument :links, [Types::ReleaseAssetLinkInputType],
+ required: false,
+ description: 'A list of asset links to associate to the release'
+ end
+end
diff --git a/app/models/alert_management/alert.rb b/app/models/alert_management/alert.rb
index 61cc15a522e..7ce7f40b6a8 100644
--- a/app/models/alert_management/alert.rb
+++ b/app/models/alert_management/alert.rb
@@ -34,7 +34,7 @@ module AlertManagement
has_many :ordered_notes, -> { fresh }, as: :noteable, class_name: 'Note'
has_many :user_mentions, class_name: 'AlertManagement::AlertUserMention', foreign_key: :alert_management_alert_id
- has_internal_id :iid, scope: :project, init: ->(s) { s.project.alert_management_alerts.maximum(:iid) }
+ has_internal_id :iid, scope: :project
sha_attribute :fingerprint
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 8083d4ed48a..c516b42dbd0 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -42,9 +42,16 @@ module Ci
belongs_to :external_pull_request
belongs_to :ci_ref, class_name: 'Ci::Ref', foreign_key: :ci_ref_id, inverse_of: :pipelines
- has_internal_id :iid, scope: :project, presence: false, track_if: -> { !importing? }, ensure_if: -> { !importing? }, init: ->(s) do
- s&.project&.all_pipelines&.maximum(:iid) || s&.project&.all_pipelines&.count
- end
+ has_internal_id :iid, scope: :project, presence: false,
+ track_if: -> { !importing? },
+ ensure_if: -> { !importing? },
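+    # init receives a pipeline instance on regular saves; when no instance is
+    # available (e.g. bulk IID allocation), it falls back to the scope hash.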
+ init: ->(pipeline, scope) do
+ if pipeline
+ pipeline.project&.all_pipelines&.maximum(:iid) || pipeline.project&.all_pipelines&.count
+ elsif scope
+ ::Ci::Pipeline.where(**scope).maximum(:iid)
+ end
+ end
has_many :stages, -> { order(position: :asc) }, inverse_of: :pipeline
has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
diff --git a/app/models/concerns/atomic_internal_id.rb b/app/models/concerns/atomic_internal_id.rb
index 4a632e8cd0c..baa99fa5a7f 100644
--- a/app/models/concerns/atomic_internal_id.rb
+++ b/app/models/concerns/atomic_internal_id.rb
@@ -27,16 +27,42 @@ module AtomicInternalId
extend ActiveSupport::Concern
class_methods do
- def has_internal_id(column, scope:, init:, ensure_if: nil, track_if: nil, presence: true, backfill: false) # rubocop:disable Naming/PredicateName
- # We require init here to retain the ability to recalculate in the absence of a
- # InternalId record (we may delete records in `internal_ids` for example).
- raise "has_internal_id requires a init block, none given." unless init
+ def has_internal_id( # rubocop:disable Naming/PredicateName
+ column, scope:, init: :not_given, ensure_if: nil, track_if: nil,
+ presence: true, backfill: false, hook_names: :create)
+ raise "has_internal_id init must not be nil if given." if init.nil?
raise "has_internal_id needs to be defined on association." unless self.reflect_on_association(scope)
- before_validation :"track_#{scope}_#{column}!", on: :create, if: track_if
- before_validation :"ensure_#{scope}_#{column}!", on: :create, if: ensure_if
+ init = infer_init(scope) if init == :not_given
+ before_validation :"track_#{scope}_#{column}!", on: hook_names, if: track_if
+ before_validation :"ensure_#{scope}_#{column}!", on: hook_names, if: ensure_if
validates column, presence: presence
+ define_singleton_internal_id_methods(scope, column, init)
+ define_instance_internal_id_methods(scope, column, init, backfill)
+ end
+
+ private
+
+ def infer_init(scope)
+ case scope
+ when :project
+ AtomicInternalId.project_init(self)
+ when :group
+ AtomicInternalId.group_init(self)
+ else
+ # We require init here to retain the ability to recalculate in the absence of an
+ # InternalId record (we may delete records in `internal_ids`, for example).
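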
+ raise "has_internal_id - cannot infer init for scope: #{scope}"
+ end
+ end
+
+ # Defines instance methods:
+ # - ensure_{scope}_{column}!
+ # - track_{scope}_{column}!
+ # - reset_{scope}_{column}
+ # - {column}=
+ def define_instance_internal_id_methods(scope, column, init, backfill)
define_method("ensure_#{scope}_#{column}!") do
return if backfill && self.class.where(column => nil).exists?
@@ -103,19 +129,95 @@ module AtomicInternalId
read_attribute(column)
end
end
+
+ # Defines class methods:
+ #
+ # - with_{scope}_{column}_supply
+ # This method can be used to allocate a block of IID values during
+ # bulk operations (importing/copying, etc). This can be more efficient
+ # than creating instances one-by-one.
+ #
+ # Pass in a block that receives a `Supply` instance. To allocate a new
+ # IID value, call `Supply#next_value`.
+ #
+ # Example:
+ #
+ # MyClass.with_project_iid_supply(project) do |supply|
+ # attributes = MyClass.where(project: project).find_each do |record|
+ # record.attributes.merge(iid: supply.next_value)
+ # end
+ #
+ # bulk_insert(attributes)
+ # end
+ def define_singleton_internal_id_methods(scope, column, init)
+ define_singleton_method("with_#{scope}_#{column}_supply") do |scope_value, &block|
+ subject = find_by(scope => scope_value) || self
+ scope_attrs = ::AtomicInternalId.scope_attrs(scope_value)
+ usage = ::AtomicInternalId.scope_usage(self)
+
+ generator = InternalId::InternalIdGenerator.new(subject, scope_attrs, usage, init)
+
+ generator.with_lock do
+ supply = Supply.new(generator.record.last_value)
+ block.call(supply)
+ ensure
+ generator.track_greatest(supply.current_value) if supply
+ end
+ end
+ end
+ end
+
+ def self.scope_attrs(scope_value)
+ { scope_value.class.table_name.singularize.to_sym => scope_value } if scope_value
end
def internal_id_scope_attrs(scope)
scope_value = internal_id_read_scope(scope)
- { scope_value.class.table_name.singularize.to_sym => scope_value } if scope_value
+ ::AtomicInternalId.scope_attrs(scope_value)
end
def internal_id_scope_usage
- self.class.table_name.to_sym
+ ::AtomicInternalId.scope_usage(self.class)
+ end
+
+ def self.scope_usage(including_class)
+ including_class.table_name.to_sym
+ end
+
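+  # Default init procs used when has_internal_id infers init from the scope:
+  # they compute the current maximum IID from the instance's project or group,
+  # or from an explicit scope hash when no instance is given.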
+ def self.project_init(klass, column_name = :iid)
+ ->(instance, scope) do
+ if instance
+ klass.where(project_id: instance.project_id).maximum(column_name)
+ elsif scope.present?
+ klass.where(**scope).maximum(column_name)
+ end
+ end
+ end
+
+ def self.group_init(klass, column_name = :iid)
+ ->(instance, scope) do
+ if instance
+ klass.where(group_id: instance.group_id).maximum(column_name)
+ elsif scope.present?
+ klass.where(group: scope[:namespace]).maximum(column_name)
+ end
+ end
end
def internal_id_read_scope(scope)
association(scope).reader
end
+
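+  # Hands out sequential IID values: next_value increments the current value
+  # and returns it, starting from just above the supplied start_value.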
+ class Supply
+ attr_reader :current_value
+
+ def initialize(start_value)
+ @current_value = start_value
+ end
+
+ def next_value
+ @current_value += 1
+ end
+ end
end
diff --git a/app/models/concerns/enums/internal_id.rb b/app/models/concerns/enums/internal_id.rb
index 2d51d232e93..f01bd60ef16 100644
--- a/app/models/concerns/enums/internal_id.rb
+++ b/app/models/concerns/enums/internal_id.rb
@@ -14,7 +14,8 @@ module Enums
operations_feature_flags: 6,
operations_user_lists: 7,
alert_management_alerts: 8,
- sprints: 9 # iterations
+ sprints: 9, # iterations
+ design_management_designs: 10
}
end
end
diff --git a/app/models/deployment.rb b/app/models/deployment.rb
index b58794eb4d1..36ac1bdb236 100644
--- a/app/models/deployment.rb
+++ b/app/models/deployment.rb
@@ -21,9 +21,7 @@ class Deployment < ApplicationRecord
has_one :deployment_cluster
- has_internal_id :iid, scope: :project, track_if: -> { !importing? }, init: ->(s) do
- Deployment.where(project: s.project).maximum(:iid) if s&.project
- end
+ has_internal_id :iid, scope: :project, track_if: -> { !importing? }
validates :sha, presence: true
validates :ref, presence: true
diff --git a/app/models/design_management/design.rb b/app/models/design_management/design.rb
index 2f8232de592..9f2eada0890 100644
--- a/app/models/design_management/design.rb
+++ b/app/models/design_management/design.rb
@@ -2,6 +2,7 @@
module DesignManagement
class Design < ApplicationRecord
+ include AtomicInternalId
include Importable
include Noteable
include Gitlab::FileTypeDetection
@@ -26,6 +27,10 @@ module DesignManagement
has_many :events, as: :target, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
+ has_internal_id :iid, scope: :project, presence: true,
+ hook_names: %i[create update], # Deal with old records that do not have an iid yet
+ track_if: -> { !importing? }
+
validates :project, :filename, presence: true
validates :issue, presence: true, unless: :importing?
validates :filename, uniqueness: { scope: :issue_id }, length: { maximum: 255 }
diff --git a/app/models/instance_metadata.rb b/app/models/instance_metadata.rb
new file mode 100644
index 00000000000..96622d0b1b3
--- /dev/null
+++ b/app/models/instance_metadata.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class InstanceMetadata
+ attr_reader :version, :revision
+
+ def initialize(version: Gitlab::VERSION, revision: Gitlab.revision)
+ @version = version
+ @revision = revision
+ end
+end
diff --git a/app/models/internal_id.rb b/app/models/internal_id.rb
index 4c0469d849a..c735e593da7 100644
--- a/app/models/internal_id.rb
+++ b/app/models/internal_id.rb
@@ -61,13 +61,13 @@ class InternalId < ApplicationRecord
class << self
def track_greatest(subject, scope, usage, new_value, init)
- InternalIdGenerator.new(subject, scope, usage)
- .track_greatest(init, new_value)
+ InternalIdGenerator.new(subject, scope, usage, init)
+ .track_greatest(new_value)
end
def generate_next(subject, scope, usage, init)
- InternalIdGenerator.new(subject, scope, usage)
- .generate(init)
+ InternalIdGenerator.new(subject, scope, usage, init)
+ .generate
end
def reset(subject, scope, usage, value)
@@ -99,15 +99,18 @@ class InternalId < ApplicationRecord
# 4) In the absence of a record in the internal_ids table, one will be created
# and last_value will be calculated on the fly.
#
- # subject: The instance we're generating an internal id for. Gets passed to init if called.
+ # subject: The instance or class we're generating an internal id for.
# scope: Attributes that define the scope for id generation.
+ # Valid keys are `project/project_id` and `namespace/namespace_id`.
# usage: Symbol to define the usage of the internal id, see InternalId.usages
- attr_reader :subject, :scope, :scope_attrs, :usage
+ # init: Proc that accepts the subject and the scope and returns Integer|NilClass
+ attr_reader :subject, :scope, :scope_attrs, :usage, :init
- def initialize(subject, scope, usage)
+ def initialize(subject, scope, usage, init = nil)
@subject = subject
@scope = scope
@usage = usage
+ @init = init
raise ArgumentError, 'Scope is not well-defined, need at least one column for scope (given: 0)' if scope.empty?
@@ -119,13 +122,13 @@ class InternalId < ApplicationRecord
# Generates next internal id and returns it
# init: Block that gets called to initialize InternalId record if not present
# Make sure to not throw exceptions in the absence of records (if this is expected).
- def generate(init)
+ def generate
subject.transaction do
# Create a record in internal_ids if one does not yet exist
# and increment its last value
#
# Note this will acquire a ROW SHARE lock on the InternalId record
- (lookup || create_record(init)).increment_and_save!
+ record.increment_and_save!
end
end
@@ -148,12 +151,20 @@ class InternalId < ApplicationRecord
# and set its new_value if it is higher than the current last_value
#
# Note this will acquire a ROW SHARE lock on the InternalId record
- def track_greatest(init, new_value)
+ def track_greatest(new_value)
subject.transaction do
- (lookup || create_record(init)).track_greatest_and_save!(new_value)
+ record.track_greatest_and_save!(new_value)
end
end
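+  # Memoized InternalId row for this (scope, usage) pair, created on first use.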
+ def record
+ @record ||= (lookup || create_record)
+ end
+
+ def with_lock(&block)
+ record.with_lock(&block)
+ end
+
private
# Retrieve InternalId record for (project, usage) combination, if it exists
@@ -171,12 +182,16 @@ class InternalId < ApplicationRecord
# was faster in doing this, we'll realize once we hit the unique key constraint
# violation. We can safely roll-back the nested transaction and perform
# a lookup instead to retrieve the record.
- def create_record(init)
+ def create_record
+ raise ArgumentError, 'Cannot initialize without init!' unless init
+
+ instance = subject.is_a?(::Class) ? nil : subject
+
subject.transaction(requires_new: true) do
InternalId.create!(
**scope,
usage: usage_value,
- last_value: init.call(subject) || 0
+ last_value: init.call(instance, scope) || 0
)
end
rescue ActiveRecord::RecordNotUnique
diff --git a/app/models/issue.rb b/app/models/issue.rb
index ffdde91b2a2..7dc18cacd7c 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -48,7 +48,7 @@ class Issue < ApplicationRecord
belongs_to :moved_to, class_name: 'Issue'
has_one :moved_from, class_name: 'Issue', foreign_key: :moved_to_id
- has_internal_id :iid, scope: :project, track_if: -> { !importing? }, init: ->(s) { s&.project&.issues&.maximum(:iid) }
+ has_internal_id :iid, scope: :project, track_if: -> { !importing? }
has_many :events, as: :target, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
diff --git a/app/models/iteration.rb b/app/models/iteration.rb
index bd245de411c..ba7cd973e9d 100644
--- a/app/models/iteration.rb
+++ b/app/models/iteration.rb
@@ -17,8 +17,8 @@ class Iteration < ApplicationRecord
belongs_to :project
belongs_to :group
- has_internal_id :iid, scope: :project, init: ->(s) { s&.project&.iterations&.maximum(:iid) }
- has_internal_id :iid, scope: :group, init: ->(s) { s&.group&.iterations&.maximum(:iid) }
+ has_internal_id :iid, scope: :project
+ has_internal_id :iid, scope: :group
validates :start_date, presence: true
validates :due_date, presence: true
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index 45eb2361cf4..e6c61c674d3 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -41,7 +41,14 @@ class MergeRequest < ApplicationRecord
belongs_to :merge_user, class_name: "User"
belongs_to :iteration, foreign_key: 'sprint_id'
- has_internal_id :iid, scope: :target_project, track_if: -> { !importing? }, init: ->(s) { s&.target_project&.merge_requests&.maximum(:iid) }
+ has_internal_id :iid, scope: :target_project, track_if: -> { !importing? },
+ init: ->(mr, scope) do
+ if mr
+ mr.target_project&.merge_requests&.maximum(:iid)
+ elsif scope[:project]
+ where(target_project: scope[:project]).maximum(:iid)
+ end
+ end
has_many :merge_request_diffs
has_many :merge_request_context_commits, inverse_of: :merge_request
diff --git a/app/models/milestone.rb b/app/models/milestone.rb
index 0a315ba8db2..c8776be5e4a 100644
--- a/app/models/milestone.rb
+++ b/app/models/milestone.rb
@@ -12,8 +12,8 @@ class Milestone < ApplicationRecord
has_many :milestone_releases
has_many :releases, through: :milestone_releases
- has_internal_id :iid, scope: :project, track_if: -> { !importing? }, init: ->(s) { s&.project&.milestones&.maximum(:iid) }
- has_internal_id :iid, scope: :group, track_if: -> { !importing? }, init: ->(s) { s&.group&.milestones&.maximum(:iid) }
+ has_internal_id :iid, scope: :project, track_if: -> { !importing? }
+ has_internal_id :iid, scope: :group, track_if: -> { !importing? }
has_many :events, as: :target, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
diff --git a/app/models/operations/feature_flag.rb b/app/models/operations/feature_flag.rb
index c9e52fe51f2..442f9d36c43 100644
--- a/app/models/operations/feature_flag.rb
+++ b/app/models/operations/feature_flag.rb
@@ -13,7 +13,7 @@ module Operations
belongs_to :project
- has_internal_id :iid, scope: :project, init: ->(s) { s&.project&.operations_feature_flags&.maximum(:iid) }
+ has_internal_id :iid, scope: :project
default_value_for :active, true
diff --git a/app/models/operations/feature_flags/user_list.rb b/app/models/operations/feature_flags/user_list.rb
index 782f7a54058..3e492eaa892 100644
--- a/app/models/operations/feature_flags/user_list.rb
+++ b/app/models/operations/feature_flags/user_list.rb
@@ -13,7 +13,7 @@ module Operations
has_many :strategy_user_lists
has_many :strategies, through: :strategy_user_lists
- has_internal_id :iid, scope: :project, init: ->(s) { s&.project&.operations_feature_flags_user_lists&.maximum(:iid) }, presence: true
+ has_internal_id :iid, scope: :project, presence: true
validates :project, presence: true
validates :name,
diff --git a/app/policies/instance_metadata_policy.rb b/app/policies/instance_metadata_policy.rb
new file mode 100644
index 00000000000..3386217044d
--- /dev/null
+++ b/app/policies/instance_metadata_policy.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+class InstanceMetadataPolicy < BasePolicy
+ delegate { :global }
+end
diff --git a/app/services/design_management/copy_design_collection/copy_service.rb b/app/services/design_management/copy_design_collection/copy_service.rb
index 5099c2c5704..c0b32e1e9ae 100644
--- a/app/services/design_management/copy_design_collection/copy_service.rb
+++ b/app/services/design_management/copy_design_collection/copy_service.rb
@@ -172,20 +172,26 @@ module DesignManagement
def copy_designs!
design_attributes = attributes_config[:design_attributes]
- new_rows = designs.map do |design|
- design.attributes.slice(*design_attributes).merge(
- issue_id: target_issue.id,
- project_id: target_project.id
+ ::DesignManagement::Design.with_project_iid_supply(target_project) do |supply|
+ new_rows = designs.each_with_index.map do |design, i|
+ design.attributes.slice(*design_attributes).merge(
+ issue_id: target_issue.id,
+ project_id: target_project.id,
+ iid: supply.next_value
+ )
+ end
+
+ # TODO Replace `Gitlab::Database.bulk_insert` with `BulkInsertSafe`
+ # once https://gitlab.com/gitlab-org/gitlab/-/issues/247718 is fixed.
+ # When this is fixed, we can remove the call to
+ # `with_project_iid_supply` above, since the objects will be instantiated
+ # and callbacks (including `ensure_project_iid!`) will fire.
+ ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
+ DesignManagement::Design.table_name,
+ new_rows,
+ return_ids: true
)
end
-
- # TODO Replace `Gitlab::Database.bulk_insert` with `BulkInsertSafe`
- # once https://gitlab.com/gitlab-org/gitlab/-/issues/247718 is fixed.
- ::Gitlab::Database.bulk_insert( # rubocop:disable Gitlab/BulkInsert
- DesignManagement::Design.table_name,
- new_rows,
- return_ids: true
- )
end
def copy_versions!
diff --git a/app/views/projects/runners/_runner.html.haml b/app/views/projects/runners/_runner.html.haml
index 74b6e981c00..1a3ba690184 100644
--- a/app/views/projects/runners/_runner.html.haml
+++ b/app/views/projects/runners/_runner.html.haml
@@ -37,8 +37,8 @@
- if runner.description.present?
%p.runner-description
= runner.description
- - if runner.tag_list.present?
+ - if runner.tags.present?
%p
- - runner.tag_list.sort.each do |tag|
+ - runner.tags.map(&:name).sort.each do |tag|
%span.badge.badge-primary
= tag
diff --git a/app/views/projects/runners/_specific_runners.html.haml b/app/views/projects/runners/_specific_runners.html.haml
index d7fe141e802..e02e2cc784a 100644
--- a/app/views/projects/runners/_specific_runners.html.haml
+++ b/app/views/projects/runners/_specific_runners.html.haml
@@ -17,9 +17,10 @@
%h4.underlined-title= _('Runners activated for this project')
%ul.bordered-list.activated-specific-runners
= render partial: 'projects/runners/runner', collection: @project_runners, as: :runner
+ = paginate @project_runners, theme: "gitlab", param_name: "project_page", params: { expand_runners: true, anchor: 'js-runners-settings' }
- if @assignable_runners.any?
%h4.underlined-title= _('Available specific runners')
%ul.bordered-list.available-specific-runners
= render partial: 'projects/runners/runner', collection: @assignable_runners, as: :runner
- = paginate @assignable_runners, theme: "gitlab", :params => { :anchor => '#js-runners-settings' }
+ = paginate @assignable_runners, theme: "gitlab", param_name: "specific_page", :params => { :anchor => 'js-runners-settings'}
diff --git a/app/views/projects/settings/ci_cd/show.html.haml b/app/views/projects/settings/ci_cd/show.html.haml
index 31669a11b8e..f6ecb923100 100644
--- a/app/views/projects/settings/ci_cd/show.html.haml
+++ b/app/views/projects/settings/ci_cd/show.html.haml
@@ -33,7 +33,7 @@
= render_if_exists 'projects/settings/ci_cd/protected_environments', expanded: expanded
-%section.settings.no-animate#js-runners-settings{ class: ('expanded' if expanded), data: { qa_selector: 'runners_settings_content' } }
+%section.settings.no-animate#js-runners-settings{ class: ('expanded' if expanded || params[:expand_runners]), data: { qa_selector: 'runners_settings_content' } }
.settings-header
%h4
= _("Runners")
diff --git a/app/views/search/_results.html.haml b/app/views/search/_results.html.haml
index 3af4437a63a..607e759928c 100644
--- a/app/views/search/_results.html.haml
+++ b/app/views/search/_results.html.haml
@@ -1,10 +1,7 @@
- if @search_objects.to_a.empty?
- .gl-display-md-flex
- - if %w(issues merge_requests).include?(@scope)
- #js-search-sidebar.gl-display-flex.gl-flex-direction-column.col-md-3.gl-mr-4{ }
- .gl-w-full
- = render partial: "search/results/empty"
- = render_if_exists 'shared/promotions/promote_advanced_search'
+ = render partial: "search/results/filters"
+ = render partial: "search/results/empty"
+ = render_if_exists 'shared/promotions/promote_advanced_search'
- else
.search-results-status
.row-content-block.gl-display-flex
@@ -27,21 +24,19 @@
.gl-display-md-flex.gl-flex-direction-column
= render partial: 'search/sort_dropdown'
= render_if_exists 'shared/promotions/promote_advanced_search'
+ = render partial: "search/results/filters"
- .results.gl-display-md-flex.gl-mt-3
- - if %w(issues merge_requests).include?(@scope)
- #js-search-sidebar{ }
- .gl-w-full
- - if @scope == 'commits'
- %ul.content-list.commit-list
- = render partial: "search/results/commit", collection: @search_objects
- - else
- .search-results
- - if @scope == 'projects'
- .term
- = render 'shared/projects/list', projects: @search_objects, pipeline_status: false
- - else
- = render_if_exists partial: "search/results/#{@scope.singularize}", collection: @search_objects
+ .results.gl-mt-3
+ - if @scope == 'commits'
+ %ul.content-list.commit-list
+ = render partial: "search/results/commit", collection: @search_objects
+ - else
+ .search-results
+ - if @scope == 'projects'
+ .term
+ = render 'shared/projects/list', projects: @search_objects, pipeline_status: false
+ - else
+ = render_if_exists partial: "search/results/#{@scope.singularize}", collection: @search_objects
- - if @scope != 'projects'
- = paginate_collection(@search_objects)
+ - if @scope != 'projects'
+ = paginate_collection(@search_objects)
diff --git a/app/views/search/results/_filters.html.haml b/app/views/search/results/_filters.html.haml
new file mode 100644
index 00000000000..2356a6e1f2c
--- /dev/null
+++ b/app/views/search/results/_filters.html.haml
@@ -0,0 +1,6 @@
+.d-lg-flex.align-items-end
+ #js-search-filter-by-state{ 'v-cloak': true }
+ #js-search-filter-by-confidential{ 'v-cloak': true }
+
+ - if %w(issues merge_requests).include?(@scope)
+ %hr.gl-mt-4.gl-mb-4
diff --git a/app/views/shared/boards/_show.html.haml b/app/views/shared/boards/_show.html.haml
index 9993342df38..ce48691166b 100644
--- a/app/views/shared/boards/_show.html.haml
+++ b/app/views/shared/boards/_show.html.haml
@@ -17,7 +17,7 @@
= render 'shared/issuable/search_bar', type: :boards, board: board
#board-app.boards-app.position-relative{ "v-cloak" => "true", data: board_data, ":class" => "{ 'is-compact': detailIssueVisible }" }
- - if Feature.enabled?(:boards_with_swimlanes, current_board_parent) || Feature.enabled?(:graphql_board_lists, current_board_parent)
+ - if Feature.enabled?(:boards_with_swimlanes, current_board_parent, default_enabled: true) || Feature.enabled?(:graphql_board_lists, current_board_parent)
%board-content{ "v-cloak" => "true",
"ref" => "board_content",
":lists" => "state.lists",
diff --git a/app/views/shared/issuable/_search_bar.html.haml b/app/views/shared/issuable/_search_bar.html.haml
index e991b901961..00b235809ed 100644
--- a/app/views/shared/issuable/_search_bar.html.haml
+++ b/app/views/shared/issuable/_search_bar.html.haml
@@ -182,7 +182,7 @@
= render 'shared/issuable/board_create_list_dropdown', board: board
- if @project
#js-add-issues-btn.gl-ml-3{ data: { can_admin_list: can?(current_user, :admin_list, @project) } }
- - if current_user && Feature.enabled?(:boards_with_swimlanes, @group)
+ - if current_user && Feature.enabled?(:boards_with_swimlanes, @group, default_enabled: true)
#js-board-epics-swimlanes-toggle
#js-toggle-focus-btn
- elsif is_not_boards_modal_or_productivity_analytics && show_sorting_dropdown
diff --git a/changelogs/unreleased/207869-lfs-enabled-checks.yml b/changelogs/unreleased/207869-lfs-enabled-checks.yml
new file mode 100644
index 00000000000..30a4a44a3b1
--- /dev/null
+++ b/changelogs/unreleased/207869-lfs-enabled-checks.yml
@@ -0,0 +1,5 @@
+---
+title: Block LFS requests on snippets
+merge_request: 45874
+author:
+type: fixed
diff --git a/changelogs/unreleased/241691-left-side-facets.yml b/changelogs/unreleased/241691-left-side-facets.yml
deleted file mode 100644
index bafbf36297b..00000000000
--- a/changelogs/unreleased/241691-left-side-facets.yml
+++ /dev/null
@@ -1,5 +0,0 @@
----
-title: Global Search - Left Sidebar
-merge_request: 46595
-author:
-type: added
diff --git a/changelogs/unreleased/ab-reindex-functional.yml b/changelogs/unreleased/ab-reindex-functional.yml
new file mode 100644
index 00000000000..b3da969ba1f
--- /dev/null
+++ b/changelogs/unreleased/ab-reindex-functional.yml
@@ -0,0 +1,5 @@
+---
+title: Expand postgres_indexes view
+merge_request: 47304
+author:
+type: other
diff --git a/changelogs/unreleased/ajk-214978-designs-iids.yml b/changelogs/unreleased/ajk-214978-designs-iids.yml
new file mode 100644
index 00000000000..5f397c6dcd1
--- /dev/null
+++ b/changelogs/unreleased/ajk-214978-designs-iids.yml
@@ -0,0 +1,5 @@
+---
+title: Add iid column to design_management_designs
+merge_request: 46596
+author:
+type: added
diff --git a/changelogs/unreleased/feature-mr-diffs-performance-marks.yml b/changelogs/unreleased/feature-mr-diffs-performance-marks.yml
new file mode 100644
index 00000000000..914991b2a91
--- /dev/null
+++ b/changelogs/unreleased/feature-mr-diffs-performance-marks.yml
@@ -0,0 +1,5 @@
+---
+title: Add performance marks and measures to the MR Diffs app at critical moments
+merge_request: 46434
+author:
+type: other
diff --git a/changelogs/unreleased/jivanvl-add-pagination-specific-runners.yml b/changelogs/unreleased/jivanvl-add-pagination-specific-runners.yml
new file mode 100644
index 00000000000..4f5a60c7ff6
--- /dev/null
+++ b/changelogs/unreleased/jivanvl-add-pagination-specific-runners.yml
@@ -0,0 +1,5 @@
+---
+title: Paginate project_runners in ci_cd settings
+merge_request: 45830
+author:
+type: fixed
diff --git a/changelogs/unreleased/nfriend-add-release-create-mutation.yml b/changelogs/unreleased/nfriend-add-release-create-mutation.yml
new file mode 100644
index 00000000000..46881bbd109
--- /dev/null
+++ b/changelogs/unreleased/nfriend-add-release-create-mutation.yml
@@ -0,0 +1,5 @@
+---
+title: Add releaseCreate mutation to GraphQL endpoint
+merge_request: 46263
+author:
+type: added
diff --git a/config/feature_flags/development/boards_with_swimlanes.yml b/config/feature_flags/development/boards_with_swimlanes.yml
index 98820b452c1..7080bceb777 100644
--- a/config/feature_flags/development/boards_with_swimlanes.yml
+++ b/config/feature_flags/development/boards_with_swimlanes.yml
@@ -1,8 +1,8 @@
---
name: boards_with_swimlanes
-introduced_by_url:
-rollout_issue_url:
-milestone:
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/issues/218040
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/238222
+milestone: 13.6
+group: group::product planning
type: development
-group: group::project management
-default_enabled: false
+default_enabled: true
diff --git a/config/feature_flags/development/search_facets.yml b/config/feature_flags/development/search_facets.yml
new file mode 100644
index 00000000000..b100c4a6490
--- /dev/null
+++ b/config/feature_flags/development/search_facets.yml
@@ -0,0 +1,7 @@
+---
+name: search_facets
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46809
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46595
+group: group::global search
+type: development
+default_enabled: false
diff --git a/db/migrate/20201030200115_add_iid_to_design_management_design.rb b/db/migrate/20201030200115_add_iid_to_design_management_design.rb
new file mode 100644
index 00000000000..c31cd86d5e2
--- /dev/null
+++ b/db/migrate/20201030200115_add_iid_to_design_management_design.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddIidToDesignManagementDesign < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :design_management_designs, :iid, :integer
+ end
+end
diff --git a/db/migrate/20201030200539_add_index_on_design_management_designs_iid_project_id.rb b/db/migrate/20201030200539_add_index_on_design_management_designs_iid_project_id.rb
new file mode 100644
index 00000000000..3ff50c299cb
--- /dev/null
+++ b/db/migrate/20201030200539_add_index_on_design_management_designs_iid_project_id.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+class AddIndexOnDesignManagementDesignsIidProjectId < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ INDEX_NAME = 'index_design_management_designs_on_iid_and_project_id'
+
+ def up
+ add_concurrent_index :design_management_designs, [:project_id, :iid],
+ name: INDEX_NAME,
+ unique: true
+ end
+
+ def down
+ remove_concurrent_index_by_name :design_management_designs, INDEX_NAME
+ end
+end
diff --git a/db/migrate/20201110110454_extend_postgres_indexes_view.rb b/db/migrate/20201110110454_extend_postgres_indexes_view.rb
new file mode 100644
index 00000000000..4e9172024d3
--- /dev/null
+++ b/db/migrate/20201110110454_extend_postgres_indexes_view.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+class ExtendPostgresIndexesView < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def up
+ execute(<<~SQL)
+ DROP VIEW postgres_indexes;
+
+ CREATE VIEW postgres_indexes AS
+ SELECT (pg_namespace.nspname::text || '.'::text) || pg_class.relname::text AS identifier,
+ pg_index.indexrelid,
+ pg_namespace.nspname AS schema,
+ pg_class.relname AS name,
+ pg_index.indisunique AS "unique",
+ pg_index.indisvalid AS valid_index,
+ pg_class.relispartition AS partitioned,
+ pg_index.indisexclusion AS exclusion,
+ pg_index.indexprs IS NOT NULL as expression,
+ pg_index.indpred IS NOT NULL as partial,
+ pg_indexes.indexdef AS definition,
+ pg_relation_size(pg_class.oid::regclass) AS ondisk_size_bytes
+ FROM pg_index
+ JOIN pg_class ON pg_class.oid = pg_index.indexrelid
+ JOIN pg_namespace ON pg_class.relnamespace = pg_namespace.oid
+ JOIN pg_indexes ON pg_class.relname = pg_indexes.indexname
+ WHERE pg_namespace.nspname <> 'pg_catalog'::name
+ AND (pg_namespace.nspname = ANY (ARRAY["current_schema"(), 'gitlab_partitions_dynamic'::name, 'gitlab_partitions_static'::name]));
+ SQL
+ end
+
+ def down
+ execute(<<~SQL)
+ DROP VIEW postgres_indexes;
+
+ CREATE VIEW postgres_indexes AS
+ SELECT (pg_namespace.nspname::text || '.'::text) || pg_class.relname::text AS identifier,
+ pg_index.indexrelid,
+ pg_namespace.nspname AS schema,
+ pg_class.relname AS name,
+ pg_index.indisunique AS "unique",
+ pg_index.indisvalid AS valid_index,
+ pg_class.relispartition AS partitioned,
+ pg_index.indisexclusion AS exclusion,
+ pg_indexes.indexdef AS definition,
+ pg_relation_size(pg_class.oid::regclass) AS ondisk_size_bytes
+ FROM pg_index
+ JOIN pg_class ON pg_class.oid = pg_index.indexrelid
+ JOIN pg_namespace ON pg_class.relnamespace = pg_namespace.oid
+ JOIN pg_indexes ON pg_class.relname = pg_indexes.indexname
+ WHERE pg_namespace.nspname <> 'pg_catalog'::name
+ AND (pg_namespace.nspname = ANY (ARRAY["current_schema"(), 'gitlab_partitions_dynamic'::name, 'gitlab_partitions_static'::name]));
+ SQL
+ end
+end
diff --git a/db/post_migrate/20201030203854_backfill_design_iids.rb b/db/post_migrate/20201030203854_backfill_design_iids.rb
new file mode 100644
index 00000000000..7acca6ad93d
--- /dev/null
+++ b/db/post_migrate/20201030203854_backfill_design_iids.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+class BackfillDesignIids < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ class Designs < ActiveRecord::Base
+ include EachBatch
+
+ self.table_name = 'design_management_designs'
+ end
+
+ def up
+ backfill = ::Gitlab::BackgroundMigration::BackfillDesignInternalIds.new(Designs)
+
+ Designs.select(:project_id).distinct.each_batch(of: 100, column: :project_id) do |relation|
+ backfill.perform(relation)
+ end
+ end
+
+ def down
+ # NOOP
+ end
+end
diff --git a/db/post_migrate/20201102152554_add_not_null_check_on_iid_on_design_manangement_designs.rb b/db/post_migrate/20201102152554_add_not_null_check_on_iid_on_design_manangement_designs.rb
new file mode 100644
index 00000000000..861a0c3c27a
--- /dev/null
+++ b/db/post_migrate/20201102152554_add_not_null_check_on_iid_on_design_manangement_designs.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddNotNullCheckOnIidOnDesignManangementDesigns < ActiveRecord::Migration[6.0]
+ include ::Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_not_null_constraint(:design_management_designs, :iid)
+ end
+
+ def down
+ remove_not_null_constraint(:design_management_designs, :iid)
+ end
+end
diff --git a/db/schema_migrations/20201030200115 b/db/schema_migrations/20201030200115
new file mode 100644
index 00000000000..ed9c743f538
--- /dev/null
+++ b/db/schema_migrations/20201030200115
@@ -0,0 +1 @@
+bef50f2417b9676c89aea838f7b9c85fb88af9f52c197d8eb4613a9c91bc7741
\ No newline at end of file
diff --git a/db/schema_migrations/20201030200539 b/db/schema_migrations/20201030200539
new file mode 100644
index 00000000000..f8e7642befb
--- /dev/null
+++ b/db/schema_migrations/20201030200539
@@ -0,0 +1 @@
+2f6c7efc1716d02dd40adb08bd09b9f1e63e4248619678c0562f4b8d581e6065
\ No newline at end of file
diff --git a/db/schema_migrations/20201030203854 b/db/schema_migrations/20201030203854
new file mode 100644
index 00000000000..6a337c366aa
--- /dev/null
+++ b/db/schema_migrations/20201030203854
@@ -0,0 +1 @@
+3937235469c8fb1f2b0af9cdf38933db5ae61552d1a9050755cec5f7c16ebb66
\ No newline at end of file
diff --git a/db/schema_migrations/20201102152554 b/db/schema_migrations/20201102152554
new file mode 100644
index 00000000000..f52694965c6
--- /dev/null
+++ b/db/schema_migrations/20201102152554
@@ -0,0 +1 @@
+7ec73c06ccc4c9f618e0455d0a7aae3b591bf52b5ddb1b3f1678d2fd50b9fd5e
\ No newline at end of file
diff --git a/db/schema_migrations/20201110110454 b/db/schema_migrations/20201110110454
new file mode 100644
index 00000000000..5b721550325
--- /dev/null
+++ b/db/schema_migrations/20201110110454
@@ -0,0 +1 @@
+f008d77d2a0aef463a924923d5a338030758d6b9c194756a0490b51a95681127
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index d411fa028cc..f9f57d321f8 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -11650,7 +11650,9 @@ CREATE TABLE design_management_designs (
issue_id integer,
filename character varying NOT NULL,
relative_position integer,
- CONSTRAINT check_07155e2715 CHECK ((char_length((filename)::text) <= 255))
+ iid integer,
+ CONSTRAINT check_07155e2715 CHECK ((char_length((filename)::text) <= 255)),
+ CONSTRAINT check_cfb92df01a CHECK ((iid IS NOT NULL))
);
CREATE SEQUENCE design_management_designs_id_seq
@@ -14722,6 +14724,8 @@ CREATE VIEW postgres_indexes AS
pg_index.indisvalid AS valid_index,
pg_class.relispartition AS partitioned,
pg_index.indisexclusion AS exclusion,
+ (pg_index.indexprs IS NOT NULL) AS expression,
+ (pg_index.indpred IS NOT NULL) AS partial,
pg_indexes.indexdef AS definition,
pg_relation_size((pg_class.oid)::regclass) AS ondisk_size_bytes
FROM (((pg_index
@@ -20592,6 +20596,8 @@ CREATE INDEX index_description_versions_on_merge_request_id ON description_versi
CREATE INDEX index_design_management_designs_issue_id_relative_position_id ON design_management_designs USING btree (issue_id, relative_position, id);
+CREATE UNIQUE INDEX index_design_management_designs_on_iid_and_project_id ON design_management_designs USING btree (project_id, iid);
+
CREATE UNIQUE INDEX index_design_management_designs_on_issue_id_and_filename ON design_management_designs USING btree (issue_id, filename);
CREATE INDEX index_design_management_designs_on_project_id ON design_management_designs USING btree (project_id);
diff --git a/doc/api/dependency_proxy.md b/doc/api/dependency_proxy.md
index fb100cc90d8..7590583bdb6 100644
--- a/doc/api/dependency_proxy.md
+++ b/doc/api/dependency_proxy.md
@@ -12,6 +12,9 @@ info: To determine the technical writer assigned to the Stage/Group associated w
Deletes the cached blobs for a group. This endpoint requires group admin access.
+CAUTION: **Warning:**
+[A bug exists](https://gitlab.com/gitlab-org/gitlab/-/issues/277161) for this API.
+
```plaintext
DELETE /groups/:id/dependency_proxy/cache
```
diff --git a/doc/api/graphql/reference/gitlab_schema.graphql b/doc/api/graphql/reference/gitlab_schema.graphql
index 1c57851fe9d..3a8195bbe30 100644
--- a/doc/api/graphql/reference/gitlab_schema.graphql
+++ b/doc/api/graphql/reference/gitlab_schema.graphql
@@ -13476,6 +13476,7 @@ type Mutation {
prometheusIntegrationResetToken(input: PrometheusIntegrationResetTokenInput!): PrometheusIntegrationResetTokenPayload
prometheusIntegrationUpdate(input: PrometheusIntegrationUpdateInput!): PrometheusIntegrationUpdatePayload
promoteToEpic(input: PromoteToEpicInput!): PromoteToEpicPayload
+ releaseCreate(input: ReleaseCreateInput!): ReleaseCreatePayload
removeAwardEmoji(input: RemoveAwardEmojiInput!): RemoveAwardEmojiPayload @deprecated(reason: "Use awardEmojiRemove. Deprecated in 13.2")
removeProjectFromSecurityDashboard(input: RemoveProjectFromSecurityDashboardInput!): RemoveProjectFromSecurityDashboardPayload
@@ -17734,7 +17735,32 @@ type ReleaseAssetLinkEdge {
}
"""
-Type of the link: `other`, `runbook`, `image`, `package`; defaults to `other`
+Fields that are available when modifying a release asset link
+"""
+input ReleaseAssetLinkInput {
+ """
+ Relative path for a direct asset link
+ """
+ directAssetPath: String
+
+ """
+ The type of the asset link
+ """
+ linkType: ReleaseAssetLinkType = OTHER
+
+ """
+ Name of the asset link
+ """
+ name: String!
+
+ """
+ URL of the asset link
+ """
+ url: String!
+}
+
+"""
+Type of the link: `other`, `runbook`, `image`, `package`
"""
enum ReleaseAssetLinkType {
"""
@@ -17818,6 +17844,16 @@ type ReleaseAssets {
): ReleaseSourceConnection
}
+"""
+Fields that are available when modifying release assets
+"""
+input ReleaseAssetsInput {
+ """
+ A list of asset links to associate to the release
+ """
+ links: [ReleaseAssetLinkInput!]
+}
+
"""
The connection type for Release.
"""
@@ -17843,6 +17879,76 @@ type ReleaseConnection {
pageInfo: PageInfo!
}
+"""
+Autogenerated input type of ReleaseCreate
+"""
+input ReleaseCreateInput {
+ """
+ Assets associated to the release
+ """
+ assets: ReleaseAssetsInput
+
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ Description (also known as "release notes") of the release
+ """
+ description: String
+
+ """
+ The title of each milestone the release is associated with. GitLab Premium customers can specify group milestones.
+ """
+ milestones: [String!]
+
+ """
+ Name of the release
+ """
+ name: String
+
+ """
+ Full path of the project the release is associated with
+ """
+ projectPath: ID!
+
+ """
+ The commit SHA or branch name to use if creating a new tag
+ """
+ ref: String
+
+ """
+ The date when the release will be/was ready. Defaults to the current time.
+ """
+ releasedAt: Time
+
+ """
+ Name of the tag to associate with the release
+ """
+ tagName: String!
+}
+
+"""
+Autogenerated return type of ReleaseCreate
+"""
+type ReleaseCreatePayload {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ Errors encountered during execution of the mutation.
+ """
+ errors: [String!]!
+
+ """
+ The release after mutation
+ """
+ release: Release
+}
+
"""
An edge in a connection.
"""
diff --git a/doc/api/graphql/reference/gitlab_schema.json b/doc/api/graphql/reference/gitlab_schema.json
index 4f44328f827..165b8389f5a 100644
--- a/doc/api/graphql/reference/gitlab_schema.json
+++ b/doc/api/graphql/reference/gitlab_schema.json
@@ -39200,6 +39200,33 @@
"isDeprecated": false,
"deprecationReason": null
},
+ {
+ "name": "releaseCreate",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "ReleaseCreateInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "ReleaseCreatePayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
{
"name": "removeAwardEmoji",
"description": null,
@@ -51265,10 +51292,69 @@
"enumValues": null,
"possibleTypes": null
},
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "ReleaseAssetLinkInput",
+ "description": "Fields that are available when modifying a release asset link",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "name",
+ "description": "Name of the asset link",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "url",
+ "description": "URL of the asset link",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "directAssetPath",
+ "description": "Relative path for a direct asset link",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "linkType",
+ "description": "The type of the asset link",
+ "type": {
+ "kind": "ENUM",
+ "name": "ReleaseAssetLinkType",
+ "ofType": null
+ },
+ "defaultValue": "OTHER"
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
{
"kind": "ENUM",
"name": "ReleaseAssetLinkType",
- "description": "Type of the link: `other`, `runbook`, `image`, `package`; defaults to `other`",
+ "description": "Type of the link: `other`, `runbook`, `image`, `package`",
"fields": null,
"inputFields": null,
"interfaces": null,
@@ -51433,6 +51519,35 @@
"enumValues": null,
"possibleTypes": null
},
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "ReleaseAssetsInput",
+ "description": "Fields that are available when modifying release assets",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "links",
+ "description": "A list of asset links to associate to the release",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "ReleaseAssetLinkInput",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
{
"kind": "OBJECT",
"name": "ReleaseConnection",
@@ -51518,6 +51633,190 @@
"enumValues": null,
"possibleTypes": null
},
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "ReleaseCreateInput",
+ "description": "Autogenerated input type of ReleaseCreate",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "projectPath",
+ "description": "Full path of the project the release is associated with",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "tagName",
+ "description": "Name of the tag to associate with the release",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "ref",
+ "description": "The commit SHA or branch name to use if creating a new tag",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "name",
+ "description": "Name of the release",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "description",
+ "description": "Description (also known as \"release notes\") of the release",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "releasedAt",
+ "description": "The date when the release will be/was ready. Defaults to the current time.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "Time",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "milestones",
+ "description": "The title of each milestone the release is associated with. GitLab Premium customers can specify group milestones.",
+ "type": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "assets",
+ "description": "Assets associated to the release",
+ "type": {
+ "kind": "INPUT_OBJECT",
+ "name": "ReleaseAssetsInput",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "ReleaseCreatePayload",
+ "description": "Autogenerated return type of ReleaseCreate",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "errors",
+ "description": "Errors encountered during execution of the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "release",
+ "description": "The release after mutation",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Release",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
{
"kind": "OBJECT",
"name": "ReleaseEdge",
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 2225e15af0f..990daf2fd0c 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -2496,6 +2496,16 @@ A container for all assets associated with a release.
| `links` | ReleaseAssetLinkConnection | Asset links of the release |
| `sources` | ReleaseSourceConnection | Sources of the release |
+### ReleaseCreatePayload
+
+Autogenerated return type of ReleaseCreate.
+
+| Field | Type | Description |
+| ----- | ---- | ----------- |
+| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
+| `errors` | String! => Array | Errors encountered during execution of the mutation. |
+| `release` | Release | The release after mutation |
+
### ReleaseEvidence
Evidence for a release.
@@ -4097,7 +4107,7 @@ State of a Geo registry.
### ReleaseAssetLinkType
-Type of the link: `other`, `runbook`, `image`, `package`; defaults to `other`.
+Type of the link: `other`, `runbook`, `image`, `package`.
| Value | Description |
| ----- | ----------- |
diff --git a/doc/user/admin_area/img/mr_approval_settings_compliance_project_v13_5.png b/doc/user/admin_area/img/mr_approval_settings_compliance_project_v13_5.png
deleted file mode 100644
index 281ccc7aa03..00000000000
Binary files a/doc/user/admin_area/img/mr_approval_settings_compliance_project_v13_5.png and /dev/null differ
diff --git a/doc/user/admin_area/img/scope_mr_approval_settings_v13_5.png b/doc/user/admin_area/img/scope_mr_approval_settings_v13_5.png
deleted file mode 100644
index 10cab491827..00000000000
Binary files a/doc/user/admin_area/img/scope_mr_approval_settings_v13_5.png and /dev/null differ
diff --git a/doc/user/admin_area/merge_requests_approvals.md b/doc/user/admin_area/merge_requests_approvals.md
index e3e235065f5..fb9ca21a214 100644
--- a/doc/user/admin_area/merge_requests_approvals.md
+++ b/doc/user/admin_area/merge_requests_approvals.md
@@ -5,50 +5,29 @@ info: To determine the technical writer assigned to the Stage/Group associated w
type: reference, concepts
---
-# Instance-level merge request approval rules **(PREMIUM ONLY)**
+# Merge request approval rules **(PREMIUM ONLY)**
> Introduced in [GitLab Premium](https://gitlab.com/gitlab-org/gitlab/-/issues/39060) 12.8.
-Merge request approvals rules prevent users overriding certain settings on a project
-level. When configured, only administrators can change these settings on a project level
-if they are enabled at an instance level.
+Merge request approval rules prevent users from overriding certain settings on the project
+level. When enabled at the instance level, these settings are no longer editable on the
+project level.
To enable merge request approval rules for an instance:
1. Navigate to **Admin Area >** **{push-rules}** **Push Rules** and expand **Merge
- requests approvals**.
+requests approvals**.
1. Set the required rule.
1. Click **Save changes**.
-GitLab administrators can later override these settings in a project’s settings.
-
## Available rules
Merge request approval rules that can be set at an instance level are:
- **Prevent approval of merge requests by merge request author**. Prevents project
- maintainers from allowing request authors to merge their own merge requests.
+maintainers from allowing request authors to merge their own merge requests.
- **Prevent approval of merge requests by merge request committers**. Prevents project
- maintainers from allowing users to approve merge requests if they have submitted
- any commits to the source branch.
-- **Can override approvers and approvals required per merge request**. Allows project
- maintainers to modify the approvers list in individual merge requests.
-
-## Scope rules to compliance-labeled projects
-
-> Introduced in [GitLab Premium](https://gitlab.com/groups/gitlab-org/-/epics/3432) 13.2.
-
-Merge request approval rules can be further scoped to specific compliance frameworks.
-
-When the compliance framework label is selected and the project is assigned the compliance
-label, the instance-level MR approval settings will take effect and the
-[project-level settings](../project/merge_requests/merge_request_approvals.md#adding--editing-a-default-approval-rule)
-is locked for modification.
-
-When the compliance framework label is not selected or the project is not assigned the
-compliance label, the project-level MR approval settings will take effect and the users with
-Maintainer role and above can modify these.
-
-| Instance-level | Project-level |
-| -------------- | ------------- |
-|  |  |
+maintainers from allowing users to approve merge requests if they have submitted
+any commits to the source branch.
+- **Prevent users from modifying merge request approvers list**. Prevents users from
+modifying the approvers list in project settings or in individual merge requests.
diff --git a/doc/user/application_security/vulnerabilities/index.md b/doc/user/application_security/vulnerabilities/index.md
index c70545369b2..95bb1ff1a67 100644
--- a/doc/user/application_security/vulnerabilities/index.md
+++ b/doc/user/application_security/vulnerabilities/index.md
@@ -11,10 +11,10 @@ info: To determine the technical writer assigned to the Stage/Group associated w
Each security vulnerability in a project's [Security Dashboard](../security_dashboard/index.md#project-security-dashboard) has an individual page which includes:
-- Details of the vulnerability.
+- Details for the vulnerability.
- The status of the vulnerability within the project.
- Available actions for the vulnerability.
-- Issues related to the vulnerability.
+- Any issues related to the vulnerability.
On the vulnerability page, you can interact with the vulnerability in
several different ways:
@@ -26,21 +26,21 @@ several different ways:
By default, such issues are [confidential](../../project/issues/confidential_issues.md).
- [Link issues](#link-issues-to-the-vulnerability) - Link existing issues to vulnerability.
- [Automatic remediation](#automatic-remediation-for-vulnerabilities) - For some vulnerabilities,
- a solution is provided for how to fix the vulnerability.
+  a solution is provided to fix the vulnerability automatically.
## Changing vulnerability status
You can switch the status of a vulnerability using the **Status** dropdown to one of
the following values:
-| Status | Description |
-|-----------|-------------------------------------------------------------------|
-| Detected | The default state for a newly discovered vulnerability |
-| Confirmed | A user has seen this vulnerability and confirmed it to be real |
-| Dismissed | A user has seen this vulnerability and dismissed it |
-| Resolved | The vulnerability has been fixed and is no longer in the codebase |
+| Status | Description |
+|-----------|------------------------------------------------------------------------------------------------------------------|
+| Detected | The default state for a newly discovered vulnerability |
+| Confirmed | A user has seen this vulnerability and confirmed it to be accurate |
+| Dismissed | A user has seen this vulnerability and dismissed it because it is not accurate or otherwise will not be resolved |
+| Resolved | The vulnerability has been fixed and is no longer valid |
-A timeline shows you when the vulnerability status has changed,
+A timeline shows you when the vulnerability status has changed
and allows you to comment on a change.
## Creating an issue for a vulnerability
@@ -48,7 +48,7 @@ and allows you to comment on a change.
You can create an issue for a vulnerability by selecting the **Create issue** button.
This creates a [confidential issue](../../project/issues/confidential_issues.md) in the
-project the vulnerability came from, and pre-populates it with useful information from
+project the vulnerability came from and pre-populates it with useful information from
the vulnerability report. After the issue is created, GitLab redirects you to the
issue page so you can edit, assign, or comment on the issue.
diff --git a/doc/user/group/iterations/index.md b/doc/user/group/iterations/index.md
index 2eb50f07de3..bd160f2ace8 100644
--- a/doc/user/group/iterations/index.md
+++ b/doc/user/group/iterations/index.md
@@ -50,7 +50,7 @@ To create an iteration:
## Edit an iteration
-> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/218277) in [GitLab Starter](https://about.gitlab.com/pricing/) 13.2.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/218277) in [GitLab Starter](https://about.gitlab.com/pricing/) 13.2.
NOTE: **Note:**
You need Developer [permissions](../../permissions.md) or higher to edit an iteration.
@@ -73,7 +73,7 @@ An iteration report displays a list of all the issues assigned to an iteration a
To view an iteration report, go to the iterations list page and click an iteration's title.
-## Disable Iterations **(CORE ONLY)**
+## Disable Iterations **(STARTER ONLY)**
GitLab Iterations feature is deployed with a feature flag that is **enabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
diff --git a/doc/user/group/saml_sso/index.md b/doc/user/group/saml_sso/index.md
index 9618616534f..94d2c9afb24 100644
--- a/doc/user/group/saml_sso/index.md
+++ b/doc/user/group/saml_sso/index.md
@@ -105,7 +105,7 @@ When [configuring your identity provider](#configuring-your-identity-provider),
### Azure setup notes
-For a demo of the Azure SAML setup including SCIM, see [SCIM Provisioning on Azure Using SAML SSO for Groups Demo](https://youtu.be/24-ZxmTeEBU).
+For a demo of the Azure SAML setup including SCIM, see [SCIM Provisioning on Azure Using SAML SSO for Groups Demo](https://youtu.be/24-ZxmTeEBU). Note that the video is outdated regarding objectID mapping; follow the [SCIM documentation](scim_setup.md#azure-configuration-steps) instead.
| GitLab Setting | Azure Field |
|--------------|----------------|
diff --git a/lib/gitlab/background_migration/backfill_design_internal_ids.rb b/lib/gitlab/background_migration/backfill_design_internal_ids.rb
new file mode 100644
index 00000000000..553571d5d00
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_design_internal_ids.rb
@@ -0,0 +1,130 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Backfill design.iid for a range of projects
+ class BackfillDesignInternalIds
+ # See app/models/internal_id
+ # This is a direct copy of the application code with the following changes:
+ # - usage enum is hard-coded to the value for design_management_designs
+ # - init is not passed around, but ignored
+ class InternalId < ActiveRecord::Base
+ def self.track_greatest(subject, scope, new_value)
+ InternalIdGenerator.new(subject, scope).track_greatest(new_value)
+ end
+
+ # Increments #last_value with new_value if it is greater than the current,
+ # and saves the record
+ #
+ # The operation locks the record and gathers a `ROW SHARE` lock (in PostgreSQL).
+ # As such, the increment is atomic and safe to be called concurrently.
+ def track_greatest_and_save!(new_value)
+ update_and_save { self.last_value = [last_value || 0, new_value].max }
+ end
+
+ private
+
+ def update_and_save(&block)
+ lock!
+ yield
+ # update_and_save_counter.increment(usage: usage, changed: last_value_changed?)
+ save!
+ last_value
+ end
+ end
+
+ # See app/models/internal_id
+ class InternalIdGenerator
+ attr_reader :subject, :scope, :scope_attrs
+
+ def initialize(subject, scope)
+ @subject = subject
+ @scope = scope
+
+ raise ArgumentError, 'Scope is not well-defined, need at least one column for scope (given: 0)' if scope.empty?
+ end
+
+ # Create a record in internal_ids if one does not yet exist
+ # and set its new_value if it is higher than the current last_value
+ #
+ # Note this will acquire a ROW SHARE lock on the InternalId record
+ def track_greatest(new_value)
+ subject.transaction do
+ record.track_greatest_and_save!(new_value)
+ end
+ end
+
+ def record
+ @record ||= (lookup || create_record)
+ end
+
+ def lookup
+ InternalId.find_by(**scope, usage: usage_value)
+ end
+
+ def usage_value
+ 10 # see Enums::InternalId - this is the value for design_management_designs
+ end
+
+ # Create InternalId record for (scope, usage) combination, if it doesn't exist
+ #
+ # We blindly insert without synchronization. If another process
+ # was faster in doing this, we'll realize once we hit the unique key constraint
+ # violation. We can safely roll-back the nested transaction and perform
+ # a lookup instead to retrieve the record.
+ def create_record
+ subject.transaction(requires_new: true) do
+ InternalId.create!(
+ **scope,
+ usage: usage_value,
+ last_value: 0
+ )
+ end
+ rescue ActiveRecord::RecordNotUnique
+ lookup
+ end
+ end
+
+ attr_reader :design_class
+
+ def initialize(design_class)
+ @design_class = design_class
+ end
+
+ def perform(relation)
+ start_id, end_id = relation.pluck("min(project_id), max(project_id)").flatten
+ table = 'design_management_designs'
+
+ ActiveRecord::Base.connection.execute <<~SQL
+ WITH
+ starting_iids(project_id, iid) as (
+ SELECT project_id, MAX(COALESCE(iid, 0))
+ FROM #{table}
+ WHERE project_id BETWEEN #{start_id} AND #{end_id}
+ GROUP BY project_id
+ ),
+ with_calculated_iid(id, iid) as (
+ SELECT design.id,
+ init.iid + ROW_NUMBER() OVER (PARTITION BY design.project_id ORDER BY design.id ASC)
+ FROM #{table} as design, starting_iids as init
+ WHERE design.project_id BETWEEN #{start_id} AND #{end_id}
+ AND design.iid IS NULL
+ AND init.project_id = design.project_id
+ )
+
+ UPDATE #{table}
+ SET iid = with_calculated_iid.iid
+ FROM with_calculated_iid
+ WHERE #{table}.id = with_calculated_iid.id
+ SQL
+
+ # track the new greatest IID value
+ relation.each do |design|
+ current_max = design_class.where(project_id: design.project_id).maximum(:iid)
+ scope = { project_id: design.project_id }
+ InternalId.track_greatest(design, scope, current_max)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/database/reindexing.rb b/lib/gitlab/database/reindexing.rb
index 074752fe75b..c77e000254f 100644
--- a/lib/gitlab/database/reindexing.rb
+++ b/lib/gitlab/database/reindexing.rb
@@ -10,6 +10,7 @@ module Gitlab
def self.candidate_indexes
Gitlab::Database::PostgresIndex
.regular
+ .where('NOT expression')
.not_match("^#{ConcurrentReindex::TEMPORARY_INDEX_PREFIX}")
.not_match("^#{ConcurrentReindex::REPLACED_INDEX_PREFIX}")
end
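For context, a rough sketch (illustrative only, assuming Rails console access) of listing the indexes this filter now excludes, using the `expression` column added to the `postgres_indexes` view earlier in this change:

```ruby
# Illustrative only: expression-based indexes, which candidate_indexes now
# skips via the `expression` column exposed by the postgres_indexes view.
Gitlab::Database::PostgresIndex.where('expression').pluck(:name)
```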
diff --git a/lib/gitlab/design_management/copy_design_collection_model_attributes.yml b/lib/gitlab/design_management/copy_design_collection_model_attributes.yml
index 1d341e6520e..95f15bd6dee 100644
--- a/lib/gitlab/design_management/copy_design_collection_model_attributes.yml
+++ b/lib/gitlab/design_management/copy_design_collection_model_attributes.yml
@@ -29,6 +29,7 @@ ignore_design_attributes:
- id
- issue_id
- project_id
+ - iid
ignore_version_attributes:
- id
diff --git a/lib/gitlab/experimentation.rb b/lib/gitlab/experimentation.rb
index fdb869eb4c2..6e39776bbd4 100644
--- a/lib/gitlab/experimentation.rb
+++ b/lib/gitlab/experimentation.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require 'zlib'
-
# == Experimentation
#
# Utility module for A/B testing experimental features. Define your experiments in the `EXPERIMENTS` constant.
@@ -87,126 +85,6 @@ module Gitlab
}
}.freeze
- # Controller concern that checks if an `experimentation_subject_id cookie` is present and sets it if absent.
- # Used for A/B testing of experimental features. Exposes the `experiment_enabled?(experiment_name)` method
- # to controllers and views. It returns true when the experiment is enabled and the user is selected as part
- # of the experimental group.
- #
- module ControllerConcern
- include ::Gitlab::Experimentation::GroupTypes
- extend ActiveSupport::Concern
-
- included do
- before_action :set_experimentation_subject_id_cookie, unless: :dnt_enabled?
- helper_method :experiment_enabled?, :experiment_tracking_category_and_group
- end
-
- def set_experimentation_subject_id_cookie
- return if cookies[:experimentation_subject_id].present?
-
- cookies.permanent.signed[:experimentation_subject_id] = {
- value: SecureRandom.uuid,
- secure: ::Gitlab.config.gitlab.https,
- httponly: true
- }
- end
-
- def push_frontend_experiment(experiment_key)
- var_name = experiment_key.to_s.camelize(:lower)
- enabled = experiment_enabled?(experiment_key)
-
- gon.push({ experiments: { var_name => enabled } }, true)
- end
-
- def experiment_enabled?(experiment_key)
- return false if dnt_enabled?
-
- return true if Experimentation.enabled_for_value?(experiment_key, experimentation_subject_index(experiment_key))
- return true if forced_enabled?(experiment_key)
-
- false
- end
-
- def track_experiment_event(experiment_key, action, value = nil)
- return if dnt_enabled?
-
- track_experiment_event_for(experiment_key, action, value) do |tracking_data|
- ::Gitlab::Tracking.event(tracking_data.delete(:category), tracking_data.delete(:action), **tracking_data)
- end
- end
-
- def frontend_experimentation_tracking_data(experiment_key, action, value = nil)
- return if dnt_enabled?
-
- track_experiment_event_for(experiment_key, action, value) do |tracking_data|
- gon.push(tracking_data: tracking_data)
- end
- end
-
- def record_experiment_user(experiment_key)
- return if dnt_enabled?
- return unless Experimentation.enabled?(experiment_key) && current_user
-
- ::Experiment.add_user(experiment_key, tracking_group(experiment_key), current_user)
- end
-
- def experiment_tracking_category_and_group(experiment_key)
- "#{tracking_category(experiment_key)}:#{tracking_group(experiment_key, '_group')}"
- end
-
- private
-
- def dnt_enabled?
- Gitlab::Utils.to_boolean(request.headers['DNT'])
- end
-
- def experimentation_subject_id
- cookies.signed[:experimentation_subject_id]
- end
-
- def experimentation_subject_index(experiment_key)
- return if experimentation_subject_id.blank?
-
- if Experimentation.experiment(experiment_key).use_backwards_compatible_subject_index
- experimentation_subject_id.delete('-').hex % 100
- else
- Zlib.crc32("#{experiment_key}#{experimentation_subject_id}") % 100
- end
- end
-
- def track_experiment_event_for(experiment_key, action, value)
- return unless Experimentation.enabled?(experiment_key)
-
- yield experimentation_tracking_data(experiment_key, action, value)
- end
-
- def experimentation_tracking_data(experiment_key, action, value)
- {
- category: tracking_category(experiment_key),
- action: action,
- property: tracking_group(experiment_key, "_group"),
- label: experimentation_subject_id,
- value: value
- }.compact
- end
-
- def tracking_category(experiment_key)
- Experimentation.experiment(experiment_key).tracking_category
- end
-
- def tracking_group(experiment_key, suffix = nil)
- return unless Experimentation.enabled?(experiment_key)
-
- group = experiment_enabled?(experiment_key) ? GROUP_EXPERIMENTAL : GROUP_CONTROL
-
- suffix ? "#{group}#{suffix}" : group
- end
-
- def forced_enabled?(experiment_key)
- params.has_key?(:force_experiment) && params[:force_experiment] == experiment_key.to_s
- end
- end
-
class << self
def experiment(key)
Experiment.new(EXPERIMENTS[key].merge(key: key))
diff --git a/lib/gitlab/experimentation/controller_concern.rb b/lib/gitlab/experimentation/controller_concern.rb
new file mode 100644
index 00000000000..c6d15d7d82d
--- /dev/null
+++ b/lib/gitlab/experimentation/controller_concern.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'zlib'
+
+# Controller concern that checks if an `experimentation_subject_id` cookie is present and sets it if absent.
+# Used for A/B testing of experimental features. Exposes the `experiment_enabled?(experiment_name)` method
+# to controllers and views. It returns true when the experiment is enabled and the user is selected as part
+# of the experimental group.
+#
+module Gitlab
+ module Experimentation
+ module ControllerConcern
+ include ::Gitlab::Experimentation::GroupTypes
+ extend ActiveSupport::Concern
+
+ included do
+ before_action :set_experimentation_subject_id_cookie, unless: :dnt_enabled?
+ helper_method :experiment_enabled?, :experiment_tracking_category_and_group
+ end
+
+ def set_experimentation_subject_id_cookie
+ return if cookies[:experimentation_subject_id].present?
+
+ cookies.permanent.signed[:experimentation_subject_id] = {
+ value: SecureRandom.uuid,
+ secure: ::Gitlab.config.gitlab.https,
+ httponly: true
+ }
+ end
+
+ def push_frontend_experiment(experiment_key)
+ var_name = experiment_key.to_s.camelize(:lower)
+ enabled = experiment_enabled?(experiment_key)
+
+ gon.push({ experiments: { var_name => enabled } }, true)
+ end
+
+ def experiment_enabled?(experiment_key)
+ return false if dnt_enabled?
+
+ return true if Experimentation.enabled_for_value?(experiment_key, experimentation_subject_index(experiment_key))
+ return true if forced_enabled?(experiment_key)
+
+ false
+ end
+
+ def track_experiment_event(experiment_key, action, value = nil)
+ return if dnt_enabled?
+
+ track_experiment_event_for(experiment_key, action, value) do |tracking_data|
+ ::Gitlab::Tracking.event(tracking_data.delete(:category), tracking_data.delete(:action), **tracking_data)
+ end
+ end
+
+ def frontend_experimentation_tracking_data(experiment_key, action, value = nil)
+ return if dnt_enabled?
+
+ track_experiment_event_for(experiment_key, action, value) do |tracking_data|
+ gon.push(tracking_data: tracking_data)
+ end
+ end
+
+ def record_experiment_user(experiment_key)
+ return if dnt_enabled?
+ return unless Experimentation.enabled?(experiment_key) && current_user
+
+ ::Experiment.add_user(experiment_key, tracking_group(experiment_key), current_user)
+ end
+
+ def experiment_tracking_category_and_group(experiment_key)
+ "#{tracking_category(experiment_key)}:#{tracking_group(experiment_key, '_group')}"
+ end
+
+ private
+
+ def dnt_enabled?
+ Gitlab::Utils.to_boolean(request.headers['DNT'])
+ end
+
+ def experimentation_subject_id
+ cookies.signed[:experimentation_subject_id]
+ end
+
+ def experimentation_subject_index(experiment_key)
+ return if experimentation_subject_id.blank?
+
+ if Experimentation.experiment(experiment_key).use_backwards_compatible_subject_index
+ experimentation_subject_id.delete('-').hex % 100
+ else
+ Zlib.crc32("#{experiment_key}#{experimentation_subject_id}") % 100
+ end
+ end
+
+ def track_experiment_event_for(experiment_key, action, value)
+ return unless Experimentation.enabled?(experiment_key)
+
+ yield experimentation_tracking_data(experiment_key, action, value)
+ end
+
+ def experimentation_tracking_data(experiment_key, action, value)
+ {
+ category: tracking_category(experiment_key),
+ action: action,
+ property: tracking_group(experiment_key, "_group"),
+ label: experimentation_subject_id,
+ value: value
+ }.compact
+ end
+
+ def tracking_category(experiment_key)
+ Experimentation.experiment(experiment_key).tracking_category
+ end
+
+ def tracking_group(experiment_key, suffix = nil)
+ return unless Experimentation.enabled?(experiment_key)
+
+ group = experiment_enabled?(experiment_key) ? GROUP_EXPERIMENTAL : GROUP_CONTROL
+
+ suffix ? "#{group}#{suffix}" : group
+ end
+
+ def forced_enabled?(experiment_key)
+ params.has_key?(:force_experiment) && params[:force_experiment] == experiment_key.to_s
+ end
+ end
+ end
+end
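A minimal usage sketch of the extracted concern; the controller name and experiment key are illustrative and not part of this change:

```ruby
# Illustrative sketch only: a controller mixing in the extracted concern.
# `:some_experiment` stands in for a key defined in Gitlab::Experimentation::EXPERIMENTS.
class ExampleController < ApplicationController
  include Gitlab::Experimentation::ControllerConcern

  def show
    # Expose the experiment state to the frontend via gon (camelCased key).
    push_frontend_experiment(:some_experiment)

    # Record the user against the experimental or control group, then branch.
    record_experiment_user(:some_experiment)

    if experiment_enabled?(:some_experiment)
      # experimental path
    else
      # control path
    end
  end
end
```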
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 36c8d1c7ae5..675c700c3e7 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -6958,9 +6958,6 @@ msgstr ""
msgid "Compliance framework (optional)"
msgstr ""
-msgid "Compliance frameworks"
-msgstr ""
-
msgid "ComplianceDashboard|created by:"
msgstr ""
@@ -19103,6 +19100,9 @@ msgstr ""
msgid "Outdent"
msgstr ""
+msgid "Overall Activity"
+msgstr ""
+
msgid "Overridden"
msgstr ""
@@ -20339,6 +20339,9 @@ msgstr ""
msgid "Prevent users from changing their profile name"
msgstr ""
+msgid "Prevent users from modifying merge request approvers list"
+msgstr ""
+
msgid "Prevent users from performing write operations on GitLab while performing maintenance."
msgstr ""
@@ -22252,9 +22255,6 @@ msgstr ""
msgid "Registry setup"
msgstr ""
-msgid "Regulate approvals by authors/committers, based on compliance frameworks. Can be changed only at the instance level."
-msgstr ""
-
msgid "Reindexing status"
msgstr ""
@@ -22751,6 +22751,9 @@ msgstr ""
msgid "Repositories Analytics"
msgstr ""
+msgid "RepositoriesAnalytics|Average Coverage by Job"
+msgstr ""
+
msgid "RepositoriesAnalytics|Coverage"
msgstr ""
@@ -22781,12 +22784,18 @@ msgstr ""
msgid "RepositoriesAnalytics|Please select projects to display."
msgstr ""
+msgid "RepositoriesAnalytics|Projects with Tests"
+msgstr ""
+
msgid "RepositoriesAnalytics|Test Code Coverage"
msgstr ""
msgid "RepositoriesAnalytics|There was an error fetching the projects."
msgstr ""
+msgid "RepositoriesAnalytics|Total Number of Coverages"
+msgstr ""
+
msgid "Repository"
msgstr ""
@@ -22974,9 +22983,6 @@ msgstr ""
msgid "Reset authorization key?"
msgstr ""
-msgid "Reset filters"
-msgstr ""
-
msgid "Reset health check access token"
msgstr ""
@@ -26653,9 +26659,6 @@ msgstr ""
msgid "The X509 Certificate to use when mutual TLS is required to communicate with the external authorization service. If left blank, the server certificate is still validated when accessing over HTTPS."
msgstr ""
-msgid "The above settings apply to all projects with the selected compliance framework(s)."
-msgstr ""
-
msgid "The application will be used where the client secret can be kept confidential. Native mobile apps and Single Page Apps are considered non-confidential."
msgstr ""
diff --git a/spec/controllers/concerns/lfs_request_spec.rb b/spec/controllers/concerns/lfs_request_spec.rb
deleted file mode 100644
index 3bafd761a3e..00000000000
--- a/spec/controllers/concerns/lfs_request_spec.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe LfsRequest do
- include ProjectForksHelper
-
- controller(Repositories::GitHttpClientController) do
- # `described_class` is not available in this context
- include LfsRequest
-
- def show
- head :ok
- end
-
- def project
- @project ||= Project.find_by(id: params[:id])
- end
-
- def download_request?
- true
- end
-
- def upload_request?
- false
- end
-
- def ci?
- false
- end
- end
-
- let(:project) { create(:project, :public) }
-
- before do
- stub_lfs_setting(enabled: true)
- end
-
- context 'user is authenticated without access to lfs' do
- before do
- allow(controller).to receive(:authenticate_user)
- allow(controller).to receive(:authentication_result) do
- Gitlab::Auth::Result.new
- end
- end
-
- context 'with access to the project' do
- it 'returns 403' do
- get :show, params: { id: project.id }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'without access to the project' do
- context 'project does not exist' do
- it 'returns 404' do
- get :show, params: { id: 'does not exist' }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'project is private' do
- let(:project) { create(:project, :private) }
-
- it 'returns 404' do
- get :show, params: { id: project.id }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
- end
-end
diff --git a/spec/factories/design_management/designs.rb b/spec/factories/design_management/designs.rb
index c58763791cc..c4fb330a0da 100644
--- a/spec/factories/design_management/designs.rb
+++ b/spec/factories/design_management/designs.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :design, class: 'DesignManagement::Design' do
+ factory :design, traits: [:has_internal_id], class: 'DesignManagement::Design' do
issue { association(:issue) }
project { issue&.project || association(:project) }
sequence(:filename) { |n| "homescreen-#{n}.jpg" }
diff --git a/spec/factories/wikis.rb b/spec/factories/wikis.rb
index 86d98bfd756..05f6fb0de58 100644
--- a/spec/factories/wikis.rb
+++ b/spec/factories/wikis.rb
@@ -17,5 +17,9 @@ FactoryBot.define do
container { project }
end
+
+ trait :empty_repo do
+ after(:create, &:create_wiki_repository)
+ end
end
end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 6e18de3be7b..9697e10c3d1 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -122,6 +122,19 @@ RSpec.describe 'Runners' do
end
end
+ context 'when multiple runners are configured' do
+ let!(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
+ let!(:specific_runner_2) { create(:ci_runner, :project, projects: [project]) }
+
+ it 'adds pagination to the runner list' do
+ stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
+
+ visit project_runners_path(project)
+
+ expect(find('.pagination')).not_to be_nil
+ end
+ end
+
context 'when a specific runner exists in another project' do
let(:another_project) { create(:project) }
let!(:specific_runner) { create(:ci_runner, :project, projects: [another_project]) }
diff --git a/spec/fixtures/lib/gitlab/import_export/designs/project.json b/spec/fixtures/lib/gitlab/import_export/designs/project.json
index ebc08868d9e..e11b10a1d4c 100644
--- a/spec/fixtures/lib/gitlab/import_export/designs/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/designs/project.json
@@ -98,6 +98,7 @@
"designs":[
{
"id":38,
+ "iid": 1,
"project_id":30,
"issue_id":469,
"filename":"chirrido3.jpg",
@@ -107,6 +108,7 @@
},
{
"id":39,
+ "iid": 2,
"project_id":30,
"issue_id":469,
"filename":"jonathan_richman.jpg",
@@ -116,6 +118,7 @@
},
{
"id":40,
+ "iid": 3,
"project_id":30,
"issue_id":469,
"filename":"mariavontrap.jpeg",
@@ -137,6 +140,7 @@
"event":0,
"design":{
"id":38,
+ "iid": 1,
"project_id":30,
"issue_id":469,
"filename":"chirrido3.jpg"
@@ -156,6 +160,7 @@
"event":1,
"design":{
"id":38,
+ "iid": 1,
"project_id":30,
"issue_id":469,
"filename":"chirrido3.jpg"
@@ -167,6 +172,7 @@
"event":0,
"design":{
"id":39,
+ "iid": 2,
"project_id":30,
"issue_id":469,
"filename":"jonathan_richman.jpg"
@@ -186,6 +192,7 @@
"event":1,
"design":{
"id":38,
+ "iid": 1,
"project_id":30,
"issue_id":469,
"filename":"chirrido3.jpg"
@@ -197,6 +204,7 @@
"event":2,
"design":{
"id":39,
+ "iid": 2,
"project_id":30,
"issue_id":469,
"filename":"jonathan_richman.jpg"
@@ -208,6 +216,7 @@
"event":0,
"design":{
"id":40,
+ "iid": 3,
"project_id":30,
"issue_id":469,
"filename":"mariavontrap.jpeg"
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index bd21252eb5a..71e0ffd176f 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -2,6 +2,7 @@ import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import createDiffsStore from '~/diffs/store/modules';
+import createNotesStore from '~/notes/stores/modules';
import diffFileMockDataReadable from '../mock_data/diff_file';
import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
@@ -10,9 +11,13 @@ import DiffFileHeaderComponent from '~/diffs/components/diff_file_header.vue';
import DiffContentComponent from '~/diffs/components/diff_content.vue';
import eventHub from '~/diffs/event_hub';
+import {
+ EVT_EXPAND_ALL_FILES,
+ EVT_PERF_MARK_DIFF_FILES_END,
+ EVT_PERF_MARK_FIRST_DIFF_FILE_SHOWN,
+} from '~/diffs/constants';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
-import { EVT_EXPAND_ALL_FILES } from '~/diffs/constants';
function changeViewer(store, index, { automaticallyCollapsed, manuallyCollapsed, name }) {
const file = store.state.diffs.diffFiles[index];
@@ -58,12 +63,13 @@ function markFileToBeRendered(store, index = 0) {
});
}
-function createComponent({ file }) {
+function createComponent({ file, first = false, last = false }) {
const localVue = createLocalVue();
localVue.use(Vuex);
const store = new Vuex.Store({
+ ...createNotesStore(),
modules: {
diffs: createDiffsStore(),
},
@@ -78,6 +84,8 @@ function createComponent({ file }) {
file,
canCurrentUserFork: false,
viewDiffsFileByFile: false,
+ isFirstFile: first,
+ isLastFile: last,
},
});
@@ -117,6 +125,72 @@ describe('DiffFile', () => {
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('bus events', () => {
+ beforeEach(() => {
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
+ });
+
+ describe('during mount', () => {
+ it.each`
+ first | last | events | file
+ ${false} | ${false} | ${[]} | ${{ inlineLines: [], parallelLines: [], readableText: true }}
+ ${true} | ${true} | ${[]} | ${{ inlineLines: [], parallelLines: [], readableText: true }}
+ ${true} | ${false} | ${[EVT_PERF_MARK_FIRST_DIFF_FILE_SHOWN]} | ${false}
+ ${false} | ${true} | ${[EVT_PERF_MARK_DIFF_FILES_END]} | ${false}
+ ${true} | ${true} | ${[EVT_PERF_MARK_FIRST_DIFF_FILE_SHOWN, EVT_PERF_MARK_DIFF_FILES_END]} | ${false}
+ `(
+ 'emits the events $events based on the file and its position ({ first: $first, last: $last }) among all files',
+ async ({ file, first, last, events }) => {
+ if (file) {
+ forceHasDiff({ store, ...file });
+ }
+
+ ({ wrapper, store } = createComponent({
+ file: store.state.diffs.diffFiles[0],
+ first,
+ last,
+ }));
+
+ await wrapper.vm.$nextTick();
+
+ expect(eventHub.$emit).toHaveBeenCalledTimes(events.length);
+ events.forEach(event => {
+ expect(eventHub.$emit).toHaveBeenCalledWith(event);
+ });
+ },
+ );
+ });
+
+ describe('after loading the diff', () => {
+ it('indicates that it loaded the file', async () => {
+ forceHasDiff({ store, inlineLines: [], parallelLines: [], readableText: true });
+ ({ wrapper, store } = createComponent({
+ file: store.state.diffs.diffFiles[0],
+ first: true,
+ last: true,
+ }));
+
+ jest.spyOn(wrapper.vm, 'loadCollapsedDiff').mockResolvedValue(getReadableFile());
+ jest.spyOn(window, 'requestIdleCallback').mockImplementation(fn => fn());
+
+ makeFileAutomaticallyCollapsed(store);
+
+ await wrapper.vm.$nextTick(); // Wait for store updates to flow into the component
+
+ toggleFile(wrapper);
+
+ await wrapper.vm.$nextTick(); // Wait for the load to resolve
+ await wrapper.vm.$nextTick(); // Wait for the idleCallback
+ await wrapper.vm.$nextTick(); // Wait for nextTick inside postRender
+
+ expect(eventHub.$emit).toHaveBeenCalledTimes(2);
+ expect(eventHub.$emit).toHaveBeenCalledWith(EVT_PERF_MARK_FIRST_DIFF_FILE_SHOWN);
+ expect(eventHub.$emit).toHaveBeenCalledWith(EVT_PERF_MARK_DIFF_FILES_END);
+ });
+ });
});
describe('template', () => {
diff --git a/spec/frontend/search/dropdown_filter/components/dropdown_filter_spec.js b/spec/frontend/search/dropdown_filter/components/dropdown_filter_spec.js
new file mode 100644
index 00000000000..f795a23404e
--- /dev/null
+++ b/spec/frontend/search/dropdown_filter/components/dropdown_filter_spec.js
@@ -0,0 +1,198 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { MOCK_QUERY } from 'jest/search/mock_data';
+import * as urlUtils from '~/lib/utils/url_utility';
+import initStore from '~/search/store';
+import DropdownFilter from '~/search/dropdown_filter/components/dropdown_filter.vue';
+import stateFilterData from '~/search/dropdown_filter/constants/state_filter_data';
+import confidentialFilterData from '~/search/dropdown_filter/constants/confidential_filter_data';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ visitUrl: jest.fn(),
+ setUrlParams: jest.fn(),
+}));
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('DropdownFilter', () => {
+ let wrapper;
+ let store;
+
+ const createStore = options => {
+ store = initStore({ query: MOCK_QUERY, ...options });
+ };
+
+ const createComponent = (props = { filterData: stateFilterData }) => {
+ wrapper = shallowMount(DropdownFilter, {
+ localVue,
+ store,
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ store = null;
+ });
+
+ const findGlDropdown = () => wrapper.find(GlDropdown);
+ const findGlDropdownItems = () => findGlDropdown().findAll(GlDropdownItem);
+ const findDropdownItemsText = () => findGlDropdownItems().wrappers.map(w => w.text());
+ const firstDropDownItem = () => findGlDropdownItems().at(0);
+
+ describe('StatusFilter', () => {
+ describe('template', () => {
+ describe.each`
+ scope | showDropdown
+ ${'issues'} | ${true}
+ ${'merge_requests'} | ${true}
+ ${'projects'} | ${false}
+ ${'milestones'} | ${false}
+ ${'users'} | ${false}
+ ${'notes'} | ${false}
+ ${'wiki_blobs'} | ${false}
+ ${'blobs'} | ${false}
+ `(`dropdown`, ({ scope, showDropdown }) => {
+ beforeEach(() => {
+ createStore({ query: { ...MOCK_QUERY, scope } });
+ createComponent();
+ });
+
+ it(`does${showDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
+ expect(findGlDropdown().exists()).toBe(showDropdown);
+ });
+ });
+
+ describe.each`
+ initialFilter | label
+ ${stateFilterData.filters.ANY.value} | ${`Any ${stateFilterData.header}`}
+ ${stateFilterData.filters.OPEN.value} | ${stateFilterData.filters.OPEN.label}
+ ${stateFilterData.filters.CLOSED.value} | ${stateFilterData.filters.CLOSED.label}
+ `(`filter text`, ({ initialFilter, label }) => {
+ describe(`when initialFilter is ${initialFilter}`, () => {
+ beforeEach(() => {
+ createStore({ query: { ...MOCK_QUERY, [stateFilterData.filterParam]: initialFilter } });
+ createComponent();
+ });
+
+ it(`sets dropdown label to ${label}`, () => {
+ expect(findGlDropdown().attributes('text')).toBe(label);
+ });
+ });
+ });
+ });
+
+ describe('Filter options', () => {
+ beforeEach(() => {
+ createStore();
+ createComponent();
+ });
+
+ it('renders a dropdown item for each filterOption', () => {
+      expect(findDropdownItemsText()).toStrictEqual(
+        stateFilterData.filterByScope[stateFilterData.scopes.ISSUES].map(v => v.label),
+      );
+ });
+
+ it('clicking a dropdown item calls setUrlParams', () => {
+ const filter = stateFilterData.filters[Object.keys(stateFilterData.filters)[0]].value;
+ firstDropDownItem().vm.$emit('click');
+
+ expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
+ page: null,
+ [stateFilterData.filterParam]: filter,
+ });
+ });
+
+ it('clicking a dropdown item calls visitUrl', () => {
+ firstDropDownItem().vm.$emit('click');
+
+ expect(urlUtils.visitUrl).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('ConfidentialFilter', () => {
+ describe('template', () => {
+ describe.each`
+ scope | showDropdown
+ ${'issues'} | ${true}
+ ${'merge_requests'} | ${false}
+ ${'projects'} | ${false}
+ ${'milestones'} | ${false}
+ ${'users'} | ${false}
+ ${'notes'} | ${false}
+ ${'wiki_blobs'} | ${false}
+ ${'blobs'} | ${false}
+ `(`dropdown`, ({ scope, showDropdown }) => {
+ beforeEach(() => {
+ createStore({ query: { ...MOCK_QUERY, scope } });
+ createComponent({ filterData: confidentialFilterData });
+ });
+
+ it(`does${showDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
+ expect(findGlDropdown().exists()).toBe(showDropdown);
+ });
+ });
+
+ describe.each`
+ initialFilter | label
+ ${confidentialFilterData.filters.ANY.value} | ${`Any ${confidentialFilterData.header}`}
+ ${confidentialFilterData.filters.CONFIDENTIAL.value} | ${confidentialFilterData.filters.CONFIDENTIAL.label}
+ ${confidentialFilterData.filters.NOT_CONFIDENTIAL.value} | ${confidentialFilterData.filters.NOT_CONFIDENTIAL.label}
+ `(`filter text`, ({ initialFilter, label }) => {
+ describe(`when initialFilter is ${initialFilter}`, () => {
+ beforeEach(() => {
+ createStore({
+ query: { ...MOCK_QUERY, [confidentialFilterData.filterParam]: initialFilter },
+ });
+ createComponent({ filterData: confidentialFilterData });
+ });
+
+ it(`sets dropdown label to ${label}`, () => {
+ expect(findGlDropdown().attributes('text')).toBe(label);
+ });
+ });
+ });
+ });
+ });
+
+ describe('Filter options', () => {
+ beforeEach(() => {
+ createStore();
+ createComponent({ filterData: confidentialFilterData });
+ });
+
+ it('renders a dropdown item for each filterOption', () => {
+      expect(findDropdownItemsText()).toStrictEqual(
+        confidentialFilterData.filterByScope[confidentialFilterData.scopes.ISSUES].map(v => v.label),
+      );
+ });
+
+ it('clicking a dropdown item calls setUrlParams', () => {
+ const filter =
+ confidentialFilterData.filters[Object.keys(confidentialFilterData.filters)[0]].value;
+ firstDropDownItem().vm.$emit('click');
+
+ expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
+ page: null,
+ [confidentialFilterData.filterParam]: filter,
+ });
+ });
+
+ it('clicking a dropdown item calls visitUrl', () => {
+ firstDropDownItem().vm.$emit('click');
+
+ expect(urlUtils.visitUrl).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/search/sidebar/components/app_spec.js b/spec/frontend/search/sidebar/components/app_spec.js
deleted file mode 100644
index c68be10f664..00000000000
--- a/spec/frontend/search/sidebar/components/app_spec.js
+++ /dev/null
@@ -1,99 +0,0 @@
-import Vuex from 'vuex';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import { GlButton, GlLink } from '@gitlab/ui';
-import { MOCK_QUERY } from 'jest/search/mock_data';
-import GlobalSearchSidebar from '~/search/sidebar/components/app.vue';
-import ConfidentialityFilter from '~/search/sidebar/components/confidentiality_filter.vue';
-import StatusFilter from '~/search/sidebar/components/status_filter.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('GlobalSearchSidebar', () => {
- let wrapper;
-
- const actionSpies = {
- applyQuery: jest.fn(),
- resetQuery: jest.fn(),
- };
-
- const createComponent = initialState => {
- const store = new Vuex.Store({
- state: {
- query: MOCK_QUERY,
- ...initialState,
- },
- actions: actionSpies,
- });
-
- wrapper = shallowMount(GlobalSearchSidebar, {
- localVue,
- store,
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findSidebarForm = () => wrapper.find('form');
- const findStatusFilter = () => wrapper.find(StatusFilter);
- const findConfidentialityFilter = () => wrapper.find(ConfidentialityFilter);
- const findApplyButton = () => wrapper.find(GlButton);
- const findResetLinkButton = () => wrapper.find(GlLink);
-
- describe('template', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders StatusFilter always', () => {
- expect(findStatusFilter().exists()).toBe(true);
- });
-
- it('renders ConfidentialityFilter always', () => {
- expect(findConfidentialityFilter().exists()).toBe(true);
- });
-
- it('renders ApplyButton always', () => {
- expect(findApplyButton().exists()).toBe(true);
- });
-
- describe('ResetLinkButton', () => {
- describe('with no filter selected', () => {
- beforeEach(() => {
- createComponent({ query: {} });
- });
-
- it('does not render', () => {
- expect(findResetLinkButton().exists()).toBe(false);
- });
- });
-
- describe('with filter selected', () => {
- it('does render when a filter selected', () => {
- expect(findResetLinkButton().exists()).toBe(true);
- });
- });
- });
- });
-
- describe('actions', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('clicking ApplyButton calls applyQuery', () => {
- findSidebarForm().trigger('submit');
-
- expect(actionSpies.applyQuery).toHaveBeenCalled();
- });
-
- it('clicking ResetLinkButton calls resetQuery', () => {
- findResetLinkButton().vm.$emit('click');
-
- expect(actionSpies.resetQuery).toHaveBeenCalled();
- });
- });
-});
diff --git a/spec/frontend/search/store/actions_spec.js b/spec/frontend/search/store/actions_spec.js
index 35d97c7dcb1..0bab4ce17a6 100644
--- a/spec/frontend/search/store/actions_spec.js
+++ b/spec/frontend/search/store/actions_spec.js
@@ -2,7 +2,6 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/search/store/actions';
import * as types from '~/search/store/mutation_types';
-import * as urlUtils from '~/lib/utils/url_utility';
import state from '~/search/store/state';
import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
@@ -43,47 +42,6 @@ describe('Global Search Store Actions', () => {
});
});
});
-
- describe('setQuery', () => {
- const payload = { key: 'key1', value: 'value1' };
-
- it('calls the SET_QUERY mutation', done => {
- testAction(actions.setQuery, payload, state, [{ type: types.SET_QUERY, payload }], [], done);
- });
- });
-
- describe('applyQuery', () => {
- beforeEach(() => {
- urlUtils.setUrlParams = jest.fn();
- urlUtils.visitUrl = jest.fn();
- });
-
- it('calls visitUrl and setParams with the state.query', () => {
- testAction(actions.applyQuery, null, state, [], [], () => {
- expect(urlUtils.setUrlParams).toHaveBeenCalledWith({ ...state.query, page: null });
- expect(urlUtils.visitUrl).toHaveBeenCalled();
- });
- });
- });
-
- describe('resetQuery', () => {
- beforeEach(() => {
- urlUtils.setUrlParams = jest.fn();
- urlUtils.visitUrl = jest.fn();
- });
-
- it('calls visitUrl and setParams with empty values', () => {
- testAction(actions.resetQuery, null, state, [], [], () => {
- expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
- ...state.query,
- page: null,
- state: null,
- confidential: null,
- });
- expect(urlUtils.visitUrl).toHaveBeenCalled();
- });
- });
- });
});
describe('setQuery', () => {
diff --git a/spec/graphql/mutations/releases/create_spec.rb b/spec/graphql/mutations/releases/create_spec.rb
new file mode 100644
index 00000000000..d6305691dac
--- /dev/null
+++ b/spec/graphql/mutations/releases/create_spec.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Releases::Create do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:milestone_12_3) { create(:milestone, project: project, title: '12.3') }
+ let_it_be(:milestone_12_4) { create(:milestone, project: project, title: '12.4') }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+
+  let(:tag) { 'v1.1.0' }
+  let(:ref) { 'master' }
+  let(:name) { 'Version 1.0' }
+ let(:description) { 'The first release :rocket:' }
+ let(:released_at) { Time.parse('2018-12-10') }
+ let(:milestones) { [milestone_12_3.title, milestone_12_4.title] }
+ let(:assets) do
+ {
+ links: [
+ {
+ name: 'An asset link',
+ url: 'https://gitlab.example.com/link',
+ filepath: '/permanent/link',
+ link_type: 'other'
+ }
+ ]
+ }
+ end
+
+ let(:mutation_arguments) do
+ {
+ project_path: project.full_path,
+ tag: tag,
+ ref: ref,
+ name: name,
+ description: description,
+ released_at: released_at,
+ milestones: milestones,
+ assets: assets
+ }
+ end
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ before do
+ project.add_reporter(reporter)
+ project.add_developer(developer)
+ end
+
+ describe '#resolve' do
+ subject(:resolve) do
+ mutation.resolve(**mutation_arguments)
+ end
+
+ let(:new_release) { subject[:release] }
+
+ context 'when the current user has access to create releases' do
+ let(:current_user) { developer }
+
+ it 'returns no errors' do
+ expect(resolve).to include(errors: [])
+ end
+
+ it 'creates the release with the correct tag' do
+ expect(new_release.tag).to eq(tag)
+ end
+
+ it 'creates the release with the correct name' do
+ expect(new_release.name).to eq(name)
+ end
+
+ it 'creates the release with the correct description' do
+ expect(new_release.description).to eq(description)
+ end
+
+ it 'creates the release with the correct released_at' do
+ expect(new_release.released_at).to eq(released_at)
+ end
+
+ it 'creates the release with the correct created_at' do
+ expect(new_release.created_at).to eq(Time.current)
+ end
+
+ it 'creates the release with the correct milestone associations' do
+ expected_milestone_titles = [milestone_12_3.title, milestone_12_4.title]
+      actual_milestone_titles = new_release.milestones.map(&:title)
+
+ # Right now the milestones are returned in a non-deterministic order.
+ # `match_array` should be updated to `eq` once
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/259012 is addressed.
+ expect(actual_milestone_titles).to match_array(expected_milestone_titles)
+ end
+
+ describe 'asset links' do
+ let(:expected_link) { assets[:links].first }
+ let(:new_link) { new_release.links.first }
+
+ it 'creates a single asset link' do
+ expect(new_release.links.size).to eq(1)
+ end
+
+ it 'creates the link with the correct name' do
+ expect(new_link.name).to eq(expected_link[:name])
+ end
+
+ it 'creates the link with the correct url' do
+ expect(new_link.url).to eq(expected_link[:url])
+ end
+
+ it 'creates the link with the correct link type' do
+ expect(new_link.link_type).to eq(expected_link[:link_type])
+ end
+
+ it 'creates the link with the correct direct filepath' do
+ expect(new_link.filepath).to eq(expected_link[:filepath])
+ end
+ end
+ end
+
+ context "when the current user doesn't have access to create releases" do
+ let(:current_user) { reporter }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/metadata_resolver_spec.rb b/spec/graphql/resolvers/metadata_resolver_spec.rb
index 20556941de4..f8c01f9d531 100644
--- a/spec/graphql/resolvers/metadata_resolver_spec.rb
+++ b/spec/graphql/resolvers/metadata_resolver_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Resolvers::MetadataResolver do
describe '#resolve' do
it 'returns version and revision' do
- expect(resolve(described_class)).to eq(version: Gitlab::VERSION, revision: Gitlab.revision)
+ expect(resolve(described_class)).to have_attributes(version: Gitlab::VERSION, revision: Gitlab.revision)
end
end
end
diff --git a/spec/graphql/types/release_asset_link_input_type_spec.rb b/spec/graphql/types/release_asset_link_input_type_spec.rb
new file mode 100644
index 00000000000..d97a91b609a
--- /dev/null
+++ b/spec/graphql/types/release_asset_link_input_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::ReleaseAssetLinkInputType do
+ specify { expect(described_class.graphql_name).to eq('ReleaseAssetLinkInput') }
+
+ it 'has the correct arguments' do
+ expect(described_class.arguments.keys).to match_array(%w[name url directAssetPath linkType])
+ end
+
+ it 'sets the type of link_type argument to ReleaseAssetLinkTypeEnum' do
+ expect(described_class.arguments['linkType'].type).to eq(Types::ReleaseAssetLinkTypeEnum)
+ end
+end
diff --git a/spec/graphql/types/release_assets_input_type_spec.rb b/spec/graphql/types/release_assets_input_type_spec.rb
new file mode 100644
index 00000000000..c44abe1e171
--- /dev/null
+++ b/spec/graphql/types/release_assets_input_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::ReleaseAssetsInputType do
+ specify { expect(described_class.graphql_name).to eq('ReleaseAssetsInput') }
+
+ it 'has the correct arguments' do
+ expect(described_class.arguments.keys).to match_array(%w[links])
+ end
+
+ it 'sets the type of links argument to ReleaseAssetLinkInputType' do
+ expect(described_class.arguments['links'].type.of_type.of_type).to eq(Types::ReleaseAssetLinkInputType)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb b/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb
new file mode 100644
index 00000000000..4bf59a02a31
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillDesignInternalIds, :migration, schema: 20201030203854 do
+ subject { described_class.new(designs) }
+
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:designs) { table(:design_management_designs) }
+
+ let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:project_2) { projects.create!(namespace_id: namespace.id) }
+
+ def create_design!(proj = project)
+ designs.create!(project_id: proj.id, filename: generate(:filename))
+ end
+
+ def migrate!
+ relation = designs.where(project_id: [project.id, project_2.id]).select(:project_id).distinct
+
+ subject.perform(relation)
+ end
+
+ it 'backfills the iid for designs' do
+ 3.times { create_design! }
+
+ expect do
+ migrate!
+ end.to change { designs.pluck(:iid) }.from(contain_exactly(nil, nil, nil)).to(contain_exactly(1, 2, 3))
+ end
+
+ it 'scopes IIDs and handles range and starting-point correctly' do
+ create_design!.update!(iid: 10)
+ create_design!.update!(iid: 12)
+ create_design!(project_2).update!(iid: 7)
+ project_3 = projects.create!(namespace_id: namespace.id)
+
+ 2.times { create_design! }
+ 2.times { create_design!(project_2) }
+ 2.times { create_design!(project_3) }
+
+ migrate!
+
+ expect(designs.where(project_id: project.id).pluck(:iid)).to contain_exactly(10, 12, 13, 14)
+ expect(designs.where(project_id: project_2.id).pluck(:iid)).to contain_exactly(7, 8, 9)
+ expect(designs.where(project_id: project_3.id).pluck(:iid)).to contain_exactly(nil, nil)
+ end
+
+ it 'updates the internal ID records' do
+ design = create_design!
+ 2.times { create_design! }
+ design.update!(iid: 10)
+ scope = { project_id: project.id }
+ usage = :design_management_designs
+ init = ->(_d, _s) { 0 }
+
+ ::InternalId.track_greatest(design, scope, usage, 10, init)
+
+ migrate!
+
+ next_iid = ::InternalId.generate_next(design, scope, usage, init)
+
+ expect(designs.pluck(:iid)).to contain_exactly(10, 11, 12)
+ expect(design.reload.iid).to eq(10)
+ expect(next_iid).to eq(13)
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index a8edcc5f7e5..ff6e5437559 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1680,7 +1680,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
has_internal_id :iid,
scope: :project,
- init: ->(s) { s&.project&.issues&.maximum(:iid) },
+ init: ->(s, _scope) { s&.project&.issues&.maximum(:iid) },
backfill: true,
presence: false
end
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index 86b3c029944..359e0597f4e 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Database::Reindexing do
it 'retrieves regular indexes that are no left-overs from previous runs' do
result = double
- expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.not_match.not_match').with(no_args).with('^tmp_reindex_').with('^old_reindex_').and_return(result)
+ expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.where.not_match.not_match').with(no_args).with('NOT expression').with('^tmp_reindex_').with('^old_reindex_').and_return(result)
expect(subject).to eq(result)
end
diff --git a/spec/lib/gitlab/experimentation/controller_concern_spec.rb b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
new file mode 100644
index 00000000000..2fe3d36daf7
--- /dev/null
+++ b/spec/lib/gitlab/experimentation/controller_concern_spec.rb
@@ -0,0 +1,438 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Experimentation::ControllerConcern, type: :controller do
+ before do
+ stub_const('Gitlab::Experimentation::EXPERIMENTS', {
+ backwards_compatible_test_experiment: {
+ environment: environment,
+ tracking_category: 'Team',
+ use_backwards_compatible_subject_index: true
+ },
+ test_experiment: {
+ environment: environment,
+ tracking_category: 'Team'
+ }
+ }
+ )
+
+ Feature.enable_percentage_of_time(:backwards_compatible_test_experiment_experiment_percentage, enabled_percentage)
+ Feature.enable_percentage_of_time(:test_experiment_experiment_percentage, enabled_percentage)
+ end
+
+ let(:environment) { Rails.env.test? }
+ let(:enabled_percentage) { 10 }
+
+ controller(ApplicationController) do
+ include Gitlab::Experimentation::ControllerConcern
+
+ def index
+ head :ok
+ end
+ end
+
+ describe '#set_experimentation_subject_id_cookie' do
+ let(:do_not_track) { nil }
+ let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] }
+
+ before do
+ request.headers['DNT'] = do_not_track if do_not_track.present?
+
+ get :index
+ end
+
+ context 'cookie is present' do
+ before do
+ cookies[:experimentation_subject_id] = 'test'
+ end
+
+ it 'does not change the cookie' do
+ expect(cookies[:experimentation_subject_id]).to eq 'test'
+ end
+ end
+
+ context 'cookie is not present' do
+ it 'sets a permanent signed cookie' do
+ expect(cookie).to be_present
+ end
+
+ context 'DNT: 0' do
+ let(:do_not_track) { '0' }
+
+ it 'sets a permanent signed cookie' do
+ expect(cookie).to be_present
+ end
+ end
+
+ context 'DNT: 1' do
+ let(:do_not_track) { '1' }
+
+ it 'does nothing' do
+ expect(cookie).not_to be_present
+ end
+ end
+ end
+ end
+
+ describe '#push_frontend_experiment' do
+ it 'pushes an experiment to the frontend' do
+ gon = instance_double('gon')
+ experiments = { experiments: { 'myExperiment' => true } }
+
+ stub_experiment_for_user(my_experiment: true)
+ allow(controller).to receive(:gon).and_return(gon)
+
+ expect(gon).to receive(:push).with(experiments, true)
+
+ controller.push_frontend_experiment(:my_experiment)
+ end
+ end
+
+ describe '#experiment_enabled?' do
+ def check_experiment(exp_key = :test_experiment)
+ controller.experiment_enabled?(exp_key)
+ end
+
+ subject { check_experiment }
+
+ context 'cookie is not present' do
+ it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of nil' do
+ expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, nil)
+ check_experiment
+ end
+ end
+
+ context 'cookie is present' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234'
+ get :index
+ end
+
+ where(:experiment_key, :index_value) do
+ :test_experiment | 40 # Zlib.crc32('test_experimentabcd-1234') % 100 = 40
+ :backwards_compatible_test_experiment | 76 # 'abcd1234'.hex % 100 = 76
+ end
+
+ with_them do
+ it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and the calculated experimentation_subject_index based on the uuid' do
+ expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(experiment_key, index_value)
+ check_experiment(experiment_key)
+ end
+ end
+ end
+
+ it 'returns true when DNT: 0 is set in the request' do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
+ controller.request.headers['DNT'] = '0'
+
+ is_expected.to be_truthy
+ end
+
+ it 'returns false when DNT: 1 is set in the request' do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
+ controller.request.headers['DNT'] = '1'
+
+ is_expected.to be_falsy
+ end
+
+ describe 'URL parameter to force enable experiment' do
+ it 'returns true unconditionally' do
+ get :index, params: { force_experiment: :test_experiment }
+
+ is_expected.to be_truthy
+ end
+ end
+ end
+
+ describe '#track_experiment_event', :snowplow do
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
+
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'tracks the event with the right parameters' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
+ property: 'experimental_group',
+ value: 1
+ )
+ end
+ end
+
+ context 'the user is part of the control group' do
+ before do
+ stub_experiment_for_user(test_experiment: false)
+ end
+
+ it 'tracks the event with the right parameters' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
+ property: 'control_group',
+ value: 1
+ )
+ end
+ end
+
+ context 'do not track is disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'does track the event' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_snowplow_event(
+ category: 'Team',
+ action: 'start',
+ property: 'control_group',
+ value: 1
+ )
+ end
+ end
+
+ context 'do not track enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not track the event' do
+ controller.track_experiment_event(:test_experiment, 'start', 1)
+
+ expect_no_snowplow_event
+ end
+ end
+ end
+
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ end
+
+ it 'does not track the event' do
+ controller.track_experiment_event(:test_experiment, 'start')
+
+ expect_no_snowplow_event
+ end
+ end
+ end
+
+ describe '#frontend_experimentation_tracking_data' do
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
+
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'experimental_group',
+ value: 'team_id'
+ }
+ )
+ end
+ end
+
+ context 'the user is part of the control group' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'control_group',
+ value: 'team_id'
+ }
+ )
+ end
+
+ it 'does not send nil value to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'control_group'
+ }
+ )
+ end
+ end
+
+ context 'do not track disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'pushes the right parameters to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+
+ expect(Gon.tracking_data).to eq(
+ {
+ category: 'Team',
+ action: 'start',
+ property: 'control_group'
+ }
+ )
+ end
+ end
+
+ context 'do not track enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not push data to gon' do
+ controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
+
+ expect(Gon.method_defined?(:tracking_data)).to be_falsey
+ end
+ end
+ end
+
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ end
+
+ it 'does not push data to gon' do
+ expect(Gon.method_defined?(:tracking_data)).to be_falsey
+ controller.track_experiment_event(:test_experiment, 'start')
+ end
+ end
+ end
+
+ describe '#record_experiment_user' do
+ let(:user) { build(:user) }
+
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'the user is part of the control group' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+ end
+
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'when there is no current_user' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'do not track' do
+ before do
+ allow(controller).to receive(:current_user).and_return(user)
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+ end
+
+ context 'is disabled' do
+ before do
+ request.headers['DNT'] = '0'
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'is enabled' do
+ before do
+ request.headers['DNT'] = '1'
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+ end
+ end
+
+ describe '#experiment_tracking_category_and_group' do
+ let_it_be(:experiment_key) { :test_something }
+
+ subject { controller.experiment_tracking_category_and_group(experiment_key) }
+
+ it 'returns a string with the experiment tracking category & group joined with a ":"' do
+ expect(controller).to receive(:tracking_category).with(experiment_key).and_return('Experiment::Category')
+ expect(controller).to receive(:tracking_group).with(experiment_key, '_group').and_return('experimental_group')
+
+ expect(subject).to eq('Experiment::Category:experimental_group')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 238392e2067..ebf98a0151f 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -50,420 +50,6 @@ RSpec.describe Gitlab::Experimentation, :snowplow do
let(:environment) { Rails.env.test? }
let(:enabled_percentage) { 10 }
- describe Gitlab::Experimentation::ControllerConcern, type: :controller do
- controller(ApplicationController) do
- include Gitlab::Experimentation::ControllerConcern
-
- def index
- head :ok
- end
- end
-
- describe '#set_experimentation_subject_id_cookie' do
- let(:do_not_track) { nil }
- let(:cookie) { cookies.permanent.signed[:experimentation_subject_id] }
-
- before do
- request.headers['DNT'] = do_not_track if do_not_track.present?
-
- get :index
- end
-
- context 'cookie is present' do
- before do
- cookies[:experimentation_subject_id] = 'test'
- end
-
- it 'does not change the cookie' do
- expect(cookies[:experimentation_subject_id]).to eq 'test'
- end
- end
-
- context 'cookie is not present' do
- it 'sets a permanent signed cookie' do
- expect(cookie).to be_present
- end
-
- context 'DNT: 0' do
- let(:do_not_Track) { '0' }
-
- it 'sets a permanent signed cookie' do
- expect(cookie).to be_present
- end
- end
-
- context 'DNT: 1' do
- let(:do_not_track) { '1' }
-
- it 'does nothing' do
- expect(cookie).not_to be_present
- end
- end
- end
- end
-
- describe '#push_frontend_experiment' do
- it 'pushes an experiment to the frontend' do
- gon = instance_double('gon')
- experiments = { experiments: { 'myExperiment' => true } }
-
- stub_experiment_for_user(my_experiment: true)
- allow(controller).to receive(:gon).and_return(gon)
-
- expect(gon).to receive(:push).with(experiments, true)
-
- controller.push_frontend_experiment(:my_experiment)
- end
- end
-
- describe '#experiment_enabled?' do
- def check_experiment(exp_key = :test_experiment)
- controller.experiment_enabled?(exp_key)
- end
-
- subject { check_experiment }
-
- context 'cookie is not present' do
- it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of nil' do
- expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, nil)
- check_experiment
- end
- end
-
- context 'cookie is present' do
- using RSpec::Parameterized::TableSyntax
-
- before do
- cookies.permanent.signed[:experimentation_subject_id] = 'abcd-1234'
- get :index
- end
-
- where(:experiment_key, :index_value) do
- :test_experiment | 40 # Zlib.crc32('test_experimentabcd-1234') % 100 = 40
- :backwards_compatible_test_experiment | 76 # 'abcd1234'.hex % 100 = 76
- end
-
- with_them do
- it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and the calculated experimentation_subject_index based on the uuid' do
- expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(experiment_key, index_value)
- check_experiment(experiment_key)
- end
- end
- end
-
- it 'returns true when DNT: 0 is set in the request' do
- allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
- controller.request.headers['DNT'] = '0'
-
- is_expected.to be_truthy
- end
-
- it 'returns false when DNT: 1 is set in the request' do
- allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
- controller.request.headers['DNT'] = '1'
-
- is_expected.to be_falsy
- end
-
- describe 'URL parameter to force enable experiment' do
- it 'returns true unconditionally' do
- get :index, params: { force_experiment: :test_experiment }
-
- is_expected.to be_truthy
- end
- end
- end
-
- describe '#track_experiment_event' do
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_user(test_experiment: true)
- end
-
- it 'tracks the event with the right parameters' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'experimental_group',
- value: 1
- )
- end
- end
-
- context 'the user is part of the control group' do
- before do
- stub_experiment_for_user(test_experiment: false)
- end
-
- it 'tracks the event with the right parameters' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1
- )
- end
- end
-
- context 'do not track is disabled' do
- before do
- request.headers['DNT'] = '0'
- end
-
- it 'does track the event' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_snowplow_event(
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 1
- )
- end
- end
-
- context 'do not track enabled' do
- before do
- request.headers['DNT'] = '1'
- end
-
- it 'does not track the event' do
- controller.track_experiment_event(:test_experiment, 'start', 1)
-
- expect_no_snowplow_event
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- end
-
- it 'does not track the event' do
- controller.track_experiment_event(:test_experiment, 'start')
-
- expect_no_snowplow_event
- end
- end
- end
-
- describe '#frontend_experimentation_tracking_data' do
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_user(test_experiment: true)
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'experimental_group',
- value: 'team_id'
- }
- )
- end
- end
-
- context 'the user is part of the control group' do
- before do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
- end
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start', 'team_id')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group',
- value: 'team_id'
- }
- )
- end
-
- it 'does not send nil value to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group'
- }
- )
- end
- end
-
- context 'do not track disabled' do
- before do
- request.headers['DNT'] = '0'
- end
-
- it 'pushes the right parameters to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
-
- expect(Gon.tracking_data).to eq(
- {
- category: 'Team',
- action: 'start',
- property: 'control_group'
- }
- )
- end
- end
-
- context 'do not track enabled' do
- before do
- request.headers['DNT'] = '1'
- end
-
- it 'does not push data to gon' do
- controller.frontend_experimentation_tracking_data(:test_experiment, 'start')
-
- expect(Gon.method_defined?(:tracking_data)).to be_falsey
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- end
-
- it 'does not push data to gon' do
- expect(Gon.method_defined?(:tracking_data)).to be_falsey
- controller.track_experiment_event(:test_experiment, 'start')
- end
- end
- end
-
- describe '#record_experiment_user' do
- let(:user) { build(:user) }
-
- context 'when the experiment is enabled' do
- before do
- stub_experiment(test_experiment: true)
- allow(controller).to receive(:current_user).and_return(user)
- end
-
- context 'the user is part of the experimental group' do
- before do
- stub_experiment_for_user(test_experiment: true)
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
-
- context 'the user is part of the control group' do
- before do
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
- end
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
- end
-
- context 'when the experiment is disabled' do
- before do
- stub_experiment(test_experiment: false)
- allow(controller).to receive(:current_user).and_return(user)
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
-
- context 'when there is no current_user' do
- before do
- stub_experiment(test_experiment: true)
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
-
- context 'do not track' do
- before do
- allow(controller).to receive(:current_user).and_return(user)
- allow_next_instance_of(described_class) do |instance|
- allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
- end
- end
-
- context 'is disabled' do
- before do
- request.headers['DNT'] = '0'
- end
-
- it 'calls add_user on the Experiment model' do
- expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
-
- context 'is enabled' do
- before do
- request.headers['DNT'] = '1'
- end
-
- it 'does not call add_user on the Experiment model' do
- expect(::Experiment).not_to receive(:add_user)
-
- controller.record_experiment_user(:test_experiment)
- end
- end
- end
- end
-
- describe '#experiment_tracking_category_and_group' do
- let_it_be(:experiment_key) { :test_something }
-
- subject { controller.experiment_tracking_category_and_group(experiment_key) }
-
- it 'returns a string with the experiment tracking category & group joined with a ":"' do
- expect(controller).to receive(:tracking_category).with(experiment_key).and_return('Experiment::Category')
- expect(controller).to receive(:tracking_group).with(experiment_key, '_group').and_return('experimental_group')
-
- expect(subject).to eq('Experiment::Category:experimental_group')
- end
- end
- end
-
describe '.enabled?' do
subject { described_class.enabled?(:test_experiment) }
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 8b254e82a92..da5e1f14ee0 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -774,6 +774,7 @@ ExternalPullRequest:
- target_sha
DesignManagement::Design:
- id
+- iid
- project_id
- filename
- relative_position
diff --git a/spec/models/concerns/atomic_internal_id_spec.rb b/spec/models/concerns/atomic_internal_id_spec.rb
index 8c3537f1dcc..5ee3c012dc9 100644
--- a/spec/models/concerns/atomic_internal_id_spec.rb
+++ b/spec/models/concerns/atomic_internal_id_spec.rb
@@ -86,4 +86,20 @@ RSpec.describe AtomicInternalId do
expect { subject }.to change { milestone.iid }.from(nil).to(iid.to_i)
end
end
+
+ describe '.with_project_iid_supply' do
+ let(:iid) { 100 }
+
+ it 'wraps generate and track_greatest in a concurrency-safe lock' do
+ expect_next_instance_of(InternalId::InternalIdGenerator) do |g|
+ expect(g).to receive(:with_lock).and_call_original
+ expect(g.record).to receive(:last_value).and_return(iid)
+ expect(g).to receive(:track_greatest).with(iid + 4)
+ end
+
+ ::Milestone.with_project_iid_supply(milestone.project) do |supply|
+ 4.times { supply.next_value }
+ end
+ end
+ end
end
diff --git a/spec/models/design_management/design_spec.rb b/spec/models/design_management/design_spec.rb
index 833f32abfcc..946541a0602 100644
--- a/spec/models/design_management/design_spec.rb
+++ b/spec/models/design_management/design_spec.rb
@@ -11,6 +11,14 @@ RSpec.describe DesignManagement::Design do
let_it_be(:design3) { create(:design, :with_versions, issue: issue, versions_count: 1) }
let_it_be(:deleted_design) { create(:design, :with_versions, deleted: true) }
+ it_behaves_like 'AtomicInternalId', validate_presence: true do
+ let(:internal_id_attribute) { :iid }
+ let(:instance) { build(:design, issue: issue) }
+ let(:scope) { :project }
+ let(:scope_attrs) { { project: instance.project } }
+ let(:usage) { :design_management_designs }
+ end
+
it_behaves_like 'a class that supports relative positioning' do
let_it_be(:relative_parent) { create(:issue) }
diff --git a/spec/models/instance_metadata_spec.rb b/spec/models/instance_metadata_spec.rb
new file mode 100644
index 00000000000..1835dc8a9af
--- /dev/null
+++ b/spec/models/instance_metadata_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe InstanceMetadata do
+ it 'has the correct properties' do
+ expect(subject).to have_attributes(
+ version: Gitlab::VERSION,
+ revision: Gitlab.revision
+ )
+ end
+end
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index 751e8724872..07f62b9de55 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -6,8 +6,9 @@ RSpec.describe InternalId do
let(:project) { create(:project) }
let(:usage) { :issues }
let(:issue) { build(:issue, project: project) }
+ let(:id_subject) { issue }
let(:scope) { { project: project } }
- let(:init) { ->(s) { s.project.issues.size } }
+ let(:init) { ->(issue, scope) { issue&.project&.issues&.size || Issue.where(**scope).count } }
it_behaves_like 'having unique enum values'
@@ -39,7 +40,7 @@ RSpec.describe InternalId do
end
describe '.generate_next' do
- subject { described_class.generate_next(issue, scope, usage, init) }
+ subject { described_class.generate_next(id_subject, scope, usage, init) }
context 'in the absence of a record' do
it 'creates a record if not yet present' do
@@ -88,6 +89,14 @@ RSpec.describe InternalId do
expect(normalized).to eq((0..seq.size - 1).to_a)
end
+
+ context 'there are no instances to pass in' do
+ let(:id_subject) { Issue }
+
+ it 'accepts classes instead' do
+ expect(subject).to eq(1)
+ end
+ end
end
describe '.reset' do
@@ -130,7 +139,7 @@ RSpec.describe InternalId do
describe '.track_greatest' do
let(:value) { 9001 }
- subject { described_class.track_greatest(issue, scope, usage, value, init) }
+ subject { described_class.track_greatest(id_subject, scope, usage, value, init) }
context 'in the absence of a record' do
it 'creates a record if not yet present' do
@@ -166,6 +175,14 @@ RSpec.describe InternalId do
expect(subject).to eq 10_001
end
end
+
+ context 'there are no instances to pass in' do
+ let(:id_subject) { Issue }
+
+ it 'accepts classes instead' do
+ expect(subject).to eq(value)
+ end
+ end
end
describe '#increment_and_save!' do
diff --git a/spec/policies/instance_metadata_policy_spec.rb b/spec/policies/instance_metadata_policy_spec.rb
new file mode 100644
index 00000000000..2c8e18483e6
--- /dev/null
+++ b/spec/policies/instance_metadata_policy_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe InstanceMetadataPolicy do
+ subject { described_class.new(user, InstanceMetadata.new) }
+
+ context 'for any logged-in user' do
+ let(:user) { create(:user) }
+
+ specify { expect_allowed(:read_instance_metadata) }
+ end
+
+ context 'for anonymous users' do
+ let(:user) { nil }
+
+ specify { expect_disallowed(:read_instance_metadata) }
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/releases/create_spec.rb b/spec/requests/api/graphql/mutations/releases/create_spec.rb
new file mode 100644
index 00000000000..2402cf62a49
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/releases/create_spec.rb
@@ -0,0 +1,375 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Creation of a new release' do
+ include GraphqlHelpers
+ include Presentable
+
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:milestone_12_3) { create(:milestone, project: project, title: '12.3') }
+ let_it_be(:milestone_12_4) { create(:milestone, project: project, title: '12.4') }
+ let_it_be(:public_user) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+
+ let(:mutation_name) { :release_create }
+
+  let(:tag_name) { 'v7.12.5' }
+  let(:ref) { 'master' }
+  let(:name) { 'Version 7.12.5' }
+ let(:description) { 'Release 7.12.5 :rocket:' }
+ let(:released_at) { '2018-12-10' }
+ let(:milestones) { [milestone_12_3.title, milestone_12_4.title] }
+ let(:asset_link) { { name: 'An asset link', url: 'https://gitlab.example.com/link', directAssetPath: '/permanent/link', linkType: 'OTHER' } }
+ let(:assets) { { links: [asset_link] } }
+
+ let(:mutation_arguments) do
+ {
+ projectPath: project.full_path,
+ tagName: tag_name,
+ ref: ref,
+ name: name,
+ description: description,
+ releasedAt: released_at,
+ milestones: milestones,
+ assets: assets
+ }
+ end
+
+ let(:mutation) do
+ graphql_mutation(mutation_name, mutation_arguments, <<~FIELDS)
+ release {
+ tagName
+ name
+ description
+ releasedAt
+ createdAt
+ milestones {
+ nodes {
+ title
+ }
+ }
+ assets {
+ links {
+ nodes {
+ name
+ url
+ linkType
+ external
+ directAssetUrl
+ }
+ }
+ }
+ }
+ errors
+ FIELDS
+ end
+
+ let(:create_release) { post_graphql_mutation(mutation, current_user: current_user) }
+ let(:mutation_response) { graphql_mutation_response(mutation_name)&.with_indifferent_access }
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ before do
+ project.add_guest(guest)
+ project.add_reporter(reporter)
+ project.add_developer(developer)
+
+ stub_default_url_options(host: 'www.example.com')
+ end
+
+ shared_examples 'no errors' do
+ it 'returns no errors' do
+ create_release
+
+ expect(graphql_errors).not_to be_present
+ end
+ end
+
+ shared_examples 'top-level error with message' do |error_message|
+ it 'returns a top-level error with message' do
+ create_release
+
+ expect(mutation_response).to be_nil
+ expect(graphql_errors.count).to eq(1)
+ expect(graphql_errors.first['message']).to eq(error_message)
+ end
+ end
+
+ shared_examples 'errors-as-data with message' do |error_message|
+ it 'returns an error-as-data with message' do
+ create_release
+
+ expect(mutation_response[:release]).to be_nil
+ expect(mutation_response[:errors].count).to eq(1)
+ expect(mutation_response[:errors].first).to match(error_message)
+ end
+ end
+
+ context 'when the current user has access to create releases' do
+ let(:current_user) { developer }
+
+ context 'when all available mutation arguments are provided' do
+ it_behaves_like 'no errors'
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ it 'returns the new release data' do
+ create_release
+
+ release = mutation_response[:release]
+ expected_direct_asset_url = Gitlab::Routing.url_helpers.project_release_url(project, Release.find_by(tag: tag_name)) << asset_link[:directAssetPath]
+
+ expected_attributes = {
+ tagName: tag_name,
+ name: name,
+ description: description,
+ releasedAt: Time.parse(released_at).utc.iso8601,
+ createdAt: Time.current.utc.iso8601,
+ assets: {
+ links: {
+ nodes: [{
+ name: asset_link[:name],
+ url: asset_link[:url],
+ linkType: asset_link[:linkType],
+ external: true,
+ directAssetUrl: expected_direct_asset_url
+ }]
+ }
+ }
+ }
+
+ expect(release).to include(expected_attributes)
+
+ # Right now the milestones are returned in a non-deterministic order.
+ # This `milestones` test should be moved up into the expect(release)
+ # above (and `.to include` updated to `.to eq`) once
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/259012 is addressed.
+ expect(release['milestones']['nodes']).to match_array([
+ { 'title' => '12.4' },
+ { 'title' => '12.3' }
+ ])
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+
+ context 'when only the required mutation arguments are provided' do
+ let(:mutation_arguments) { super().slice(:projectPath, :tagName, :ref) }
+
+ it_behaves_like 'no errors'
+
+ it 'returns the new release data' do
+ create_release
+
+ expected_response = {
+ tagName: tag_name,
+ name: tag_name,
+ description: nil,
+ releasedAt: Time.current.utc.iso8601,
+ createdAt: Time.current.utc.iso8601,
+ milestones: {
+ nodes: []
+ },
+ assets: {
+ links: {
+ nodes: []
+ }
+ }
+ }.with_indifferent_access
+
+ expect(mutation_response[:release]).to eq(expected_response)
+ end
+ end
+
+ context 'when the provided tag already exists' do
+ let(:tag_name) { 'v1.1.0' }
+
+ it_behaves_like 'no errors'
+
+ it 'does not create a new tag' do
+ expect { create_release }.not_to change { Project.find_by_id(project.id).repository.tag_count }
+ end
+ end
+
+ context 'when the provided tag does not already exist' do
+ let(:tag_name) { 'v7.12.5-alpha' }
+
+ it_behaves_like 'no errors'
+
+ it 'creates a new tag' do
+ expect { create_release }.to change { Project.find_by_id(project.id).repository.tag_count }.by(1)
+ end
+ end
+
+ context 'when a local timezone is provided for releasedAt' do
+ let(:released_at) { Time.parse(super()).in_time_zone('Hawaii').iso8601 }
+
+ it_behaves_like 'no errors'
+
+ it 'returns the correct releasedAt date in UTC' do
+ create_release
+
+ expect(mutation_response[:release]).to include({ releasedAt: Time.parse(released_at).utc.iso8601 })
+ end
+ end
+
+ context 'when no releasedAt is provided' do
+ let(:mutation_arguments) { super().except(:releasedAt) }
+
+ it_behaves_like 'no errors'
+
+ it 'sets releasedAt to the current time' do
+ create_release
+
+ expect(mutation_response[:release]).to include({ releasedAt: Time.current.utc.iso8601 })
+ end
+ end
+
+ context "when a release asset doesn't include an explicit linkType" do
+ let(:asset_link) { super().except(:linkType) }
+
+ it_behaves_like 'no errors'
+
+ it 'defaults the linkType to OTHER' do
+ create_release
+
+ returned_asset_link_type = mutation_response.dig(:release, :assets, :links, :nodes, 0, :linkType)
+
+ expect(returned_asset_link_type).to eq('OTHER')
+ end
+ end
+
+ context "when a release asset doesn't include a directAssetPath" do
+ let(:asset_link) { super().except(:directAssetPath) }
+
+ it_behaves_like 'no errors'
+
+ it 'returns the provided url as the directAssetUrl' do
+ create_release
+
+        returned_direct_asset_url = mutation_response.dig(:release, :assets, :links, :nodes, 0, :directAssetUrl)
+
+        expect(returned_direct_asset_url).to eq(asset_link[:url])
+ end
+ end
+
+ context 'empty milestones' do
+ shared_examples 'no associated milestones' do
+ it_behaves_like 'no errors'
+
+ it 'creates a release with no associated milestones' do
+ create_release
+
+ returned_milestones = mutation_response.dig(:release, :milestones, :nodes)
+
+ expect(returned_milestones.count).to eq(0)
+ end
+ end
+
+ context 'when the milestones parameter is not provided' do
+ let(:mutation_arguments) { super().except(:milestones) }
+
+ it_behaves_like 'no associated milestones'
+ end
+
+ context 'when the milestones parameter is null' do
+ let(:milestones) { nil }
+
+ it_behaves_like 'no associated milestones'
+ end
+
+ context 'when the milestones parameter is an empty array' do
+ let(:milestones) { [] }
+
+ it_behaves_like 'no associated milestones'
+ end
+ end
+
+ context 'validation' do
+ context 'when a release is already associated to the specified tag' do
+ before do
+ create(:release, project: project, tag: tag_name)
+ end
+
+ it_behaves_like 'errors-as-data with message', 'Release already exists'
+ end
+
+ context "when a provided milestone doesn\'t exist" do
+ let(:milestones) { ['a fake milestone'] }
+
+ it_behaves_like 'errors-as-data with message', 'Milestone(s) not found: a fake milestone'
+ end
+
+ context "when a provided milestone belongs to a different project than the release" do
+ let(:milestone_in_different_project) { create(:milestone, title: 'different milestone') }
+ let(:milestones) { [milestone_in_different_project.title] }
+
+ it_behaves_like 'errors-as-data with message', "Milestone(s) not found: different milestone"
+ end
+
+ context 'when two release assets share the same name' do
+ let(:asset_link_1) { { name: 'My link', url: 'https://example.com/1' } }
+ let(:asset_link_2) { { name: 'My link', url: 'https://example.com/2' } }
+ let(:assets) { { links: [asset_link_1, asset_link_2] } }
+
+ # Right now the raw Postgres error message is sent to the user as the validation message.
+ # We should catch this validation error and return a nicer message:
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/277087
+ it_behaves_like 'errors-as-data with message', 'PG::UniqueViolation'
+ end
+
+ context 'when two release assets share the same URL' do
+ let(:asset_link_1) { { name: 'My first link', url: 'https://example.com' } }
+ let(:asset_link_2) { { name: 'My second link', url: 'https://example.com' } }
+ let(:assets) { { links: [asset_link_1, asset_link_2] } }
+
+ # Same note as above about the ugly error message
+ it_behaves_like 'errors-as-data with message', 'PG::UniqueViolation'
+ end
+
+ context 'when the provided tag name is HEAD' do
+ let(:tag_name) { 'HEAD' }
+
+ it_behaves_like 'errors-as-data with message', 'Tag name invalid'
+ end
+
+ context 'when the provided tag name is empty' do
+ let(:tag_name) { '' }
+
+ it_behaves_like 'errors-as-data with message', 'Tag name invalid'
+ end
+
+ context "when the provided tag doesn't already exist, and no ref parameter was provided" do
+ let(:ref) { nil }
+ let(:tag_name) { 'v7.12.5-beta' }
+
+ it_behaves_like 'errors-as-data with message', 'Ref is not specified'
+ end
+ end
+ end
+
+ context "when the current user doesn't have access to create releases" do
+ expected_error_message = "The resource that you are attempting to access does not exist or you don't have permission to perform this action"
+
+ context 'when the current user is a Reporter' do
+ let(:current_user) { reporter }
+
+ it_behaves_like 'top-level error with message', expected_error_message
+ end
+
+ context 'when the current user is a Guest' do
+ let(:current_user) { guest }
+
+ it_behaves_like 'top-level error with message', expected_error_message
+ end
+
+ context 'when the current user is a public user' do
+ let(:current_user) { public_user }
+
+ it_behaves_like 'top-level error with message', expected_error_message
+ end
+ end
+end
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 31bb0586e9f..48d125a37c3 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -9,18 +9,17 @@ RSpec.describe 'Git LFS API and storage' do
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:other_project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- let!(:lfs_object) { create(:lfs_object, :with_file) }
+ let(:lfs_object) { create(:lfs_object, :with_file) }
let(:headers) do
{
'Authorization' => authorization,
- 'X-Sendfile-Type' => sendfile
+ 'X-Sendfile-Type' => 'X-Sendfile'
}.compact
end
let(:include_workhorse_jwt_header) { true }
let(:authorization) { }
- let(:sendfile) { }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:sample_oid) { lfs_object.oid }
@@ -37,18 +36,6 @@ RSpec.describe 'Git LFS API and storage' do
stub_lfs_setting(enabled: lfs_enabled)
end
- describe 'when LFS is disabled' do
- let(:lfs_enabled) { false }
- let(:body) { upload_body(multiple_objects) }
- let(:authorization) { authorize_user }
-
- before do
- post_lfs_json batch_url(project), body, headers
- end
-
- it_behaves_like 'LFS http 501 response'
- end
-
context 'project specific LFS settings' do
let(:body) { upload_body(sample_object) }
let(:authorization) { authorize_user }
@@ -60,303 +47,171 @@ RSpec.describe 'Git LFS API and storage' do
subject
end
- context 'with LFS disabled globally' do
- let(:lfs_enabled) { false }
+ describe 'LFS disabled in project' do
+ let(:project_lfs_enabled) { false }
- describe 'LFS disabled in project' do
- let(:project_lfs_enabled) { false }
+ context 'when uploading' do
+ subject { post_lfs_json(batch_url(project), body, headers) }
- context 'when uploading' do
- subject { post_lfs_json(batch_url(project), body, headers) }
-
- it_behaves_like 'LFS http 501 response'
- end
-
- context 'when downloading' do
- subject { get(objects_url(project, sample_oid), params: {}, headers: headers) }
-
- it_behaves_like 'LFS http 501 response'
- end
+ it_behaves_like 'LFS http 404 response'
end
- describe 'LFS enabled in project' do
- let(:project_lfs_enabled) { true }
+ context 'when downloading' do
+ subject { get(objects_url(project, sample_oid), params: {}, headers: headers) }
- context 'when uploading' do
- subject { post_lfs_json(batch_url(project), body, headers) }
-
- it_behaves_like 'LFS http 501 response'
- end
-
- context 'when downloading' do
- subject { get(objects_url(project, sample_oid), params: {}, headers: headers) }
-
- it_behaves_like 'LFS http 501 response'
- end
+ it_behaves_like 'LFS http 404 response'
end
end
- context 'with LFS enabled globally' do
- describe 'LFS disabled in project' do
- let(:project_lfs_enabled) { false }
+ describe 'LFS enabled in project' do
+ let(:project_lfs_enabled) { true }
- context 'when uploading' do
- subject { post_lfs_json(batch_url(project), body, headers) }
+ context 'when uploading' do
+ subject { post_lfs_json(batch_url(project), body, headers) }
- it_behaves_like 'LFS http 403 response'
- end
-
- context 'when downloading' do
- subject { get(objects_url(project, sample_oid), params: {}, headers: headers) }
-
- it_behaves_like 'LFS http 403 response'
- end
+ it_behaves_like 'LFS http 200 response'
end
- describe 'LFS enabled in project' do
- let(:project_lfs_enabled) { true }
+ context 'when downloading' do
+ subject { get(objects_url(project, sample_oid), params: {}, headers: headers) }
- context 'when uploading' do
- subject { post_lfs_json(batch_url(project), body, headers) }
-
- it_behaves_like 'LFS http 200 response'
- end
-
- context 'when downloading' do
- subject { get(objects_url(project, sample_oid), params: {}, headers: headers) }
-
- it_behaves_like 'LFS http 200 response'
- end
+ it_behaves_like 'LFS http 200 blob response'
end
end
end
- describe 'deprecated API' do
- let(:authorization) { authorize_user }
-
- shared_examples 'deprecated request' do
- before do
- subject
- end
-
- it_behaves_like 'LFS http expected response code and message' do
- let(:response_code) { 501 }
- let(:message) { 'Server supports batch API only, please update your Git LFS client to version 1.0.1 and up.' }
- end
- end
-
- context 'when fetching LFS object using deprecated API' do
- subject { get(deprecated_objects_url(project, sample_oid), params: {}, headers: headers) }
-
- it_behaves_like 'deprecated request'
- end
-
- context 'when handling LFS request using deprecated API' do
- subject { post_lfs_json(deprecated_objects_url(project), nil, headers) }
-
- it_behaves_like 'deprecated request'
- end
-
- def deprecated_objects_url(project, oid = nil)
- File.join(["#{project.http_url_to_repo}/info/lfs/objects/", oid].compact)
- end
- end
-
describe 'when fetching LFS object' do
let(:update_permissions) { }
let(:before_get) { }
before do
+ project.lfs_objects << lfs_object
update_permissions
before_get
+
get objects_url(project, sample_oid), params: {}, headers: headers
end
- context 'and request comes from gitlab-workhorse' do
- context 'without user being authorized' do
- it_behaves_like 'LFS http 401 response'
+ context 'when LFS uses object storage' do
+ let(:authorization) { authorize_user }
+
+ let(:update_permissions) do
+ project.add_maintainer(user)
end
- context 'with required headers' do
- shared_examples 'responds with a file' do
- let(:sendfile) { 'X-Sendfile' }
-
- it_behaves_like 'LFS http 200 response'
-
- it 'responds with the file location' do
- expect(response.headers['Content-Type']).to eq('application/octet-stream')
- expect(response.headers['X-Sendfile']).to eq(lfs_object.file.path)
- end
+ context 'when proxy download is enabled' do
+ let(:before_get) do
+ stub_lfs_object_storage(proxy_download: true)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
- context 'with user is authorized' do
- let(:authorization) { authorize_user }
+ it 'responds with the workhorse send-url' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
+ end
+ end
- context 'and does not have project access' do
- let(:update_permissions) do
- project.lfs_objects << lfs_object
- end
-
- it_behaves_like 'LFS http 404 response'
- end
-
- context 'and does have project access' do
- let(:update_permissions) do
- project.add_maintainer(user)
- project.lfs_objects << lfs_object
- end
-
- it_behaves_like 'responds with a file'
-
- context 'when LFS uses object storage' do
- context 'when proxy download is enabled' do
- let(:before_get) do
- stub_lfs_object_storage(proxy_download: true)
- lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
- end
-
- it_behaves_like 'LFS http 200 response'
-
- it 'responds with the workhorse send-url' do
- expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
- end
- end
-
- context 'when proxy download is disabled' do
- let(:before_get) do
- stub_lfs_object_storage(proxy_download: false)
- lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
- end
-
- it 'responds with redirect' do
- expect(response).to have_gitlab_http_status(:found)
- end
-
- it 'responds with the file location' do
- expect(response.location).to include(lfs_object.reload.file.path)
- end
- end
- end
- end
+ context 'when proxy download is disabled' do
+ let(:before_get) do
+ stub_lfs_object_storage(proxy_download: false)
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
- context 'when deploy key is authorized' do
- let(:key) { create(:deploy_key) }
- let(:authorization) { authorize_deploy_key }
+ it 'responds with redirect' do
+ expect(response).to have_gitlab_http_status(:found)
+ end
+
+ it 'responds with the file location' do
+ expect(response.location).to include(lfs_object.reload.file.path)
+ end
+ end
+ end
+
+ context 'when deploy key is authorized' do
+ let(:key) { create(:deploy_key) }
+ let(:authorization) { authorize_deploy_key }
+
+ let(:update_permissions) do
+ project.deploy_keys << key
+ end
+
+ it_behaves_like 'LFS http 200 blob response'
+ end
+
+ context 'when using a user key (LFSToken)' do
+ let(:authorization) { authorize_user_key }
+
+ context 'when user allowed' do
+ let(:update_permissions) do
+ project.add_maintainer(user)
+ end
+
+ it_behaves_like 'LFS http 200 blob response'
+
+ context 'when user password is expired' do
+          let(:user) { create(:user, password_expires_at: 1.minute.ago) }
+
+ it_behaves_like 'LFS http 401 response'
+ end
+
+ context 'when user is blocked' do
+          let(:user) { create(:user, :blocked) }
+
+ it_behaves_like 'LFS http 401 response'
+ end
+ end
+
+ context 'when user not allowed' do
+ it_behaves_like 'LFS http 404 response'
+ end
+ end
+
+ context 'when build is authorized as' do
+ let(:authorization) { authorize_ci_project }
+
+ shared_examples 'can download LFS only from own projects' do
+ context 'for owned project' do
+ let(:project) { create(:project, namespace: user.namespace) }
+
+ it_behaves_like 'LFS http 200 blob response'
+ end
+
+ context 'for member of project' do
+ let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:update_permissions) do
- project.deploy_keys << key
- project.lfs_objects << lfs_object
+ project.add_reporter(user)
end
- it_behaves_like 'responds with a file'
+ it_behaves_like 'LFS http 200 blob response'
end
- describe 'when using a user key (LFSToken)' do
- let(:authorization) { authorize_user_key }
+ context 'for other project' do
+ let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
- context 'when user allowed' do
- let(:update_permissions) do
- project.add_maintainer(user)
- project.lfs_objects << lfs_object
- end
-
- it_behaves_like 'responds with a file'
-
- context 'when user password is expired' do
- let(:user) { create(:user, password_expires_at: 1.minute.ago)}
-
- it_behaves_like 'LFS http 401 response'
- end
-
- context 'when user is blocked' do
- let(:user) { create(:user, :blocked)}
-
- it_behaves_like 'LFS http 401 response'
- end
- end
-
- context 'when user not allowed' do
- let(:update_permissions) do
- project.lfs_objects << lfs_object
- end
-
- it_behaves_like 'LFS http 404 response'
- end
- end
-
- context 'when build is authorized as' do
- let(:authorization) { authorize_ci_project }
-
- shared_examples 'can download LFS only from own projects' do
- context 'for owned project' do
- let(:project) { create(:project, namespace: user.namespace) }
-
- let(:update_permissions) do
- project.lfs_objects << lfs_object
- end
-
- it_behaves_like 'responds with a file'
- end
-
- context 'for member of project' do
- let(:pipeline) { create(:ci_empty_pipeline, project: project) }
-
- let(:update_permissions) do
- project.add_reporter(user)
- project.lfs_objects << lfs_object
- end
-
- it_behaves_like 'responds with a file'
- end
-
- context 'for other project' do
- let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
-
- let(:update_permissions) do
- project.lfs_objects << lfs_object
- end
-
- it 'rejects downloading code' do
- expect(response).to have_gitlab_http_status(other_project_status)
- end
- end
- end
-
- context 'administrator' do
- let(:user) { create(:admin) }
- let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
-
- it_behaves_like 'can download LFS only from own projects' do
- # We render 403, because administrator does have normally access
- let(:other_project_status) { 403 }
- end
- end
-
- context 'regular user' do
- let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
-
- it_behaves_like 'can download LFS only from own projects' do
- # We render 404, to prevent data leakage about existence of the project
- let(:other_project_status) { 404 }
- end
- end
-
- context 'does not have user' do
- let(:build) { create(:ci_build, :running, pipeline: pipeline) }
-
- it_behaves_like 'can download LFS only from own projects' do
- # We render 404, to prevent data leakage about existence of the project
- let(:other_project_status) { 404 }
- end
+ it 'rejects downloading code' do
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
- context 'without required headers' do
- let(:authorization) { authorize_user }
+ context 'administrator' do
+ let(:user) { create(:admin) }
+ let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
- it_behaves_like 'LFS http 404 response'
+ it_behaves_like 'can download LFS only from own projects'
+ end
+
+ context 'regular user' do
+ let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
+
+ it_behaves_like 'can download LFS only from own projects'
+ end
+
+ context 'does not have user' do
+ let(:build) { create(:ci_build, :running, pipeline: pipeline) }
+
+ it_behaves_like 'can download LFS only from own projects'
end
end
end
@@ -511,7 +366,7 @@ RSpec.describe 'Git LFS API and storage' do
let(:role) { :reporter }
end
- context 'when user does is not member of the project' do
+ context 'when user is not a member of the project' do
let(:update_user_permissions) { nil }
it_behaves_like 'LFS http 404 response'
@@ -520,7 +375,7 @@ RSpec.describe 'Git LFS API and storage' do
context 'when user does not have download access' do
let(:role) { :guest }
- it_behaves_like 'LFS http 403 response'
+ it_behaves_like 'LFS http 404 response'
end
context 'when user password is expired' do
@@ -591,7 +446,7 @@ RSpec.describe 'Git LFS API and storage' do
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
it 'rejects downloading code' do
- expect(response).to have_gitlab_http_status(other_project_status)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
@@ -600,28 +455,19 @@ RSpec.describe 'Git LFS API and storage' do
let(:user) { create(:admin) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
- it_behaves_like 'can download LFS only from own projects', renew_authorization: true do
- # We render 403, because administrator does have normally access
- let(:other_project_status) { 403 }
- end
+ it_behaves_like 'can download LFS only from own projects', renew_authorization: true
end
context 'regular user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
- it_behaves_like 'can download LFS only from own projects', renew_authorization: true do
- # We render 404, to prevent data leakage about existence of the project
- let(:other_project_status) { 404 }
- end
+ it_behaves_like 'can download LFS only from own projects', renew_authorization: true
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
- it_behaves_like 'can download LFS only from own projects', renew_authorization: false do
- # We render 404, to prevent data leakage about existence of the project
- let(:other_project_status) { 404 }
- end
+ it_behaves_like 'can download LFS only from own projects', renew_authorization: false
end
end
@@ -919,11 +765,7 @@ RSpec.describe 'Git LFS API and storage' do
put_authorize
end
- it_behaves_like 'LFS http 200 response'
-
- it 'uses the gitlab-workhorse content type' do
- expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
- end
+ it_behaves_like 'LFS http 200 workhorse response'
end
shared_examples 'a local file' do
@@ -1142,7 +984,7 @@ RSpec.describe 'Git LFS API and storage' do
put_authorize
end
- it_behaves_like 'LFS http 404 response'
+ it_behaves_like 'LFS http 403 response'
end
end
@@ -1155,7 +997,7 @@ RSpec.describe 'Git LFS API and storage' do
put_authorize
end
- it_behaves_like 'LFS http 200 response'
+ it_behaves_like 'LFS http 200 workhorse response'
context 'when user password is expired' do
let(:user) { create(:user, password_expires_at: 1.minute.ago)}
@@ -1202,7 +1044,7 @@ RSpec.describe 'Git LFS API and storage' do
put_authorize
end
- it_behaves_like 'LFS http 200 response'
+ it_behaves_like 'LFS http 200 workhorse response'
it 'with location of LFS store and object details' do
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
@@ -1330,4 +1172,50 @@ RSpec.describe 'Git LFS API and storage' do
"#{sample_oid}012345678"
end
end
+
+ context 'with projects' do
+ it_behaves_like 'LFS http requests' do
+ let(:container) { project }
+ let(:authorize_guest) { project.add_guest(user) }
+ let(:authorize_download) { project.add_reporter(user) }
+ let(:authorize_upload) { project.add_developer(user) }
+ end
+ end
+
+ context 'with project wikis' do
+ it_behaves_like 'LFS http requests' do
+ let(:container) { create(:project_wiki, :empty_repo, project: project) }
+ let(:authorize_guest) { project.add_guest(user) }
+ let(:authorize_download) { project.add_reporter(user) }
+ let(:authorize_upload) { project.add_developer(user) }
+ end
+ end
+
+ context 'with snippets' do
+ # LFS is not supported on snippets, so we override the shared examples
+ # to expect 404 responses instead.
+ [
+ 'LFS http 200 response',
+ 'LFS http 200 blob response',
+ 'LFS http 403 response'
+    ].each do |shared_example_name|
+      shared_examples_for(shared_example_name) { it_behaves_like 'LFS http 404 response' }
+ end
+
+ context 'with project snippets' do
+ it_behaves_like 'LFS http requests' do
+ let(:container) { create(:project_snippet, :empty_repo, project: project) }
+ let(:authorize_guest) { project.add_guest(user) }
+ let(:authorize_download) { project.add_reporter(user) }
+ let(:authorize_upload) { project.add_developer(user) }
+ end
+ end
+
+ context 'with personal snippets' do
+ it_behaves_like 'LFS http requests' do
+ let(:container) { create(:personal_snippet, :empty_repo) }
+ let(:authorize_upload) { container.update!(author: user) }
+ end
+ end
+ end
end
diff --git a/spec/requests/lfs_locks_api_spec.rb b/spec/requests/lfs_locks_api_spec.rb
index 34e345cb1cf..0eb3cb4ca07 100644
--- a/spec/requests/lfs_locks_api_spec.rb
+++ b/spec/requests/lfs_locks_api_spec.rb
@@ -3,24 +3,38 @@
require 'spec_helper'
RSpec.describe 'Git LFS File Locking API' do
+ include LfsHttpHelpers
include WorkhorseHelpers
- let(:project) { create(:project) }
- let(:maintainer) { create(:user) }
- let(:developer) { create(:user) }
- let(:guest) { create(:user) }
- let(:path) { 'README.md' }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:path) { 'README.md' }
+
+ let(:user) { developer }
let(:headers) do
{
- 'Authorization' => authorization
+ 'Authorization' => authorize_user
}.compact
end
shared_examples 'unauthorized request' do
- context 'when user is not authorized' do
- let(:authorization) { authorize_user(guest) }
+ context 'when user does not have download permission' do
+ let(:user) { guest }
- it 'returns a forbidden 403 response' do
+ it 'returns a 404 response' do
+ post_lfs_json url, body, headers
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user does not have upload permission' do
+ let(:user) { reporter }
+
+ it 'returns a 403 response' do
post_lfs_json url, body, headers
expect(response).to have_gitlab_http_status(:forbidden)
@@ -31,15 +45,15 @@ RSpec.describe 'Git LFS File Locking API' do
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
- project.add_developer(maintainer)
+ project.add_maintainer(maintainer)
project.add_developer(developer)
+ project.add_reporter(reporter)
project.add_guest(guest)
end
describe 'Create File Lock endpoint' do
- let(:url) { "#{project.http_url_to_repo}/info/lfs/locks" }
- let(:authorization) { authorize_user(developer) }
- let(:body) { { path: path } }
+ let(:url) { "#{project.http_url_to_repo}/info/lfs/locks" }
+ let(:body) { { path: path } }
include_examples 'unauthorized request'
@@ -76,8 +90,7 @@ RSpec.describe 'Git LFS File Locking API' do
end
describe 'Listing File Locks endpoint' do
- let(:url) { "#{project.http_url_to_repo}/info/lfs/locks" }
- let(:authorization) { authorize_user(developer) }
+ let(:url) { "#{project.http_url_to_repo}/info/lfs/locks" }
include_examples 'unauthorized request'
@@ -95,8 +108,7 @@ RSpec.describe 'Git LFS File Locking API' do
end
describe 'List File Locks for verification endpoint' do
- let(:url) { "#{project.http_url_to_repo}/info/lfs/locks/verify" }
- let(:authorization) { authorize_user(developer) }
+ let(:url) { "#{project.http_url_to_repo}/info/lfs/locks/verify" }
include_examples 'unauthorized request'
@@ -116,9 +128,8 @@ RSpec.describe 'Git LFS File Locking API' do
end
describe 'Delete File Lock endpoint' do
- let!(:lock) { lock_file('README.md', developer) }
- let(:url) { "#{project.http_url_to_repo}/info/lfs/locks/#{lock[:id]}/unlock" }
- let(:authorization) { authorize_user(developer) }
+ let!(:lock) { lock_file('README.md', developer) }
+ let(:url) { "#{project.http_url_to_repo}/info/lfs/locks/#{lock[:id]}/unlock" }
include_examples 'unauthorized request'
@@ -136,7 +147,7 @@ RSpec.describe 'Git LFS File Locking API' do
end
context 'when a maintainer uses force' do
- let(:authorization) { authorize_user(maintainer) }
+ let(:user) { maintainer }
it 'deletes the lock' do
project.add_maintainer(maintainer)
@@ -154,14 +165,6 @@ RSpec.describe 'Git LFS File Locking API' do
result[:lock]
end
- def authorize_user(user)
- ActionController::HttpAuthentication::Basic.encode_credentials(user.username, user.password)
- end
-
- def post_lfs_json(url, body = nil, headers = nil)
- post(url, params: body.try(:to_json), headers: (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE))
- end
-
def do_get(url, params = nil, headers = nil)
get(url, params: (params || {}), headers: (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE))
end
diff --git a/spec/services/design_management/copy_design_collection/copy_service_spec.rb b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
index e93e5f13fea..ddbed91815f 100644
--- a/spec/services/design_management/copy_design_collection/copy_service_spec.rb
+++ b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
@@ -68,6 +68,31 @@ RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitla
include_examples 'service error', message: 'Target design collection already has designs'
end
+ context 'when target project already has designs' do
+ let!(:issue_x) { create(:issue, project: target_issue.project) }
+ let!(:existing) { create(:design, issue: issue_x, project: target_issue.project) }
+
+ let(:new_designs) do
+ target_issue.reset
+ target_issue.designs.where.not(id: existing.id)
+ end
+
+ it 'sets IIDs for new designs above existing ones' do
+ subject
+
+ expect(new_designs).to all(have_attributes(iid: (be > existing.iid)))
+ end
+
+ it 'does not allow for IID collisions' do
+ subject
+ create(:design, issue: issue_x, project: target_issue.project)
+
+          design_iids = target_issue.project.designs.map(&:iid)
+
+ expect(design_iids).to match_array(design_iids.uniq)
+ end
+ end
+
include_examples 'service success'
it 'creates a design repository for the target project' do
@@ -162,9 +187,7 @@ RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitla
it 'copies the Git repository data', :aggregate_failures do
subject
- commit_shas = target_repository.commits('master', limit: 99).map(&:id)
-
- expect(commit_shas).to include(*target_issue.design_versions.ordered.pluck(:sha))
+ expect(commits_on_master(limit: 99)).to include(*target_issue.design_versions.ordered.pluck(:sha))
end
it 'creates a master branch if none previously existed' do
@@ -212,9 +235,7 @@ RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitla
issue_2 = create(:issue, project: target_issue.project)
create(:design, :with_file, issue: issue_2, project: target_issue.project)
- expect { subject }.not_to change {
- expect(target_repository.commits('master', limit: 10).size).to eq(1)
- }
+ expect { subject }.not_to change { commits_on_master }
end
it 'sets the design collection copy state' do
@@ -223,6 +244,10 @@ RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitla
expect(target_issue.design_collection).to be_copy_error
end
end
+
+ def commits_on_master(limit: 10)
+ target_repository.commits('master', limit: limit).map(&:id)
+ end
end
end
end
diff --git a/spec/support/helpers/lfs_http_helpers.rb b/spec/support/helpers/lfs_http_helpers.rb
index 0537b122040..199d5e70e32 100644
--- a/spec/support/helpers/lfs_http_helpers.rb
+++ b/spec/support/helpers/lfs_http_helpers.rb
@@ -31,16 +31,16 @@ module LfsHttpHelpers
post(url, params: params, headers: headers)
end
- def batch_url(project)
- "#{project.http_url_to_repo}/info/lfs/objects/batch"
+ def batch_url(container)
+ "#{container.http_url_to_repo}/info/lfs/objects/batch"
end
- def objects_url(project, oid = nil, size = nil)
- File.join(["#{project.http_url_to_repo}/gitlab-lfs/objects", oid, size].compact.map(&:to_s))
+ def objects_url(container, oid = nil, size = nil)
+ File.join(["#{container.http_url_to_repo}/gitlab-lfs/objects", oid, size].compact.map(&:to_s))
end
- def authorize_url(project, oid, size)
- File.join(objects_url(project, oid, size), 'authorize')
+ def authorize_url(container, oid, size)
+ File.join(objects_url(container, oid, size), 'authorize')
end
def download_body(objects)
diff --git a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
index 62d56f2e86e..fe99b1cacd9 100644
--- a/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
+++ b/spec/support/shared_examples/models/atomic_internal_id_shared_examples.rb
@@ -76,6 +76,26 @@ RSpec.shared_examples 'AtomicInternalId' do |validate_presence: true|
end
end
+ describe 'supply of internal ids' do
+ let(:scope_value) { scope_attrs.each_value.first }
+ let(:method_name) { :"with_#{scope}_#{internal_id_attribute}_supply" }
+
+ it 'provides a persistent supply of IID values, sensitive to the current state' do
+ iid = rand(1..1000)
+ write_internal_id(iid)
+ instance.public_send(:"track_#{scope}_#{internal_id_attribute}!")
+
+ # Allocate 3 IID values
+ described_class.public_send(method_name, scope_value) do |supply|
+ 3.times { supply.next_value }
+ end
+
+ current_value = described_class.public_send(method_name, scope_value, &:current_value)
+
+ expect(current_value).to eq(iid + 3)
+ end
+ end
+
describe "#reset_scope_internal_id_attribute" do
it 'rewinds the allocated IID' do
expect { ensure_scope_attribute! }.not_to raise_error
diff --git a/spec/support/shared_examples/requests/lfs_http_shared_examples.rb b/spec/support/shared_examples/requests/lfs_http_shared_examples.rb
index 48c5a5933e6..4ae77179527 100644
--- a/spec/support/shared_examples/requests/lfs_http_shared_examples.rb
+++ b/spec/support/shared_examples/requests/lfs_http_shared_examples.rb
@@ -2,42 +2,252 @@
RSpec.shared_examples 'LFS http 200 response' do
it_behaves_like 'LFS http expected response code and message' do
- let(:response_code) { 200 }
+ let(:response_code) { :ok }
+ end
+end
+
+RSpec.shared_examples 'LFS http 200 blob response' do
+ it_behaves_like 'LFS http expected response code and message' do
+ let(:response_code) { :ok }
+ let(:content_type) { Repositories::LfsApiController::LFS_TRANSFER_CONTENT_TYPE }
+ let(:response_headers) { { 'X-Sendfile' => lfs_object.file.path } }
+ end
+end
+
+RSpec.shared_examples 'LFS http 200 workhorse response' do
+ it_behaves_like 'LFS http expected response code and message' do
+ let(:response_code) { :ok }
+ let(:content_type) { Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE }
end
end
RSpec.shared_examples 'LFS http 401 response' do
it_behaves_like 'LFS http expected response code and message' do
- let(:response_code) { 401 }
+ let(:response_code) { :unauthorized }
+ let(:content_type) { 'text/plain' }
end
end
RSpec.shared_examples 'LFS http 403 response' do
it_behaves_like 'LFS http expected response code and message' do
- let(:response_code) { 403 }
+ let(:response_code) { :forbidden }
let(:message) { 'Access forbidden. Check your access level.' }
end
end
RSpec.shared_examples 'LFS http 501 response' do
it_behaves_like 'LFS http expected response code and message' do
- let(:response_code) { 501 }
+ let(:response_code) { :not_implemented }
let(:message) { 'Git LFS is not enabled on this GitLab server, contact your admin.' }
end
end
RSpec.shared_examples 'LFS http 404 response' do
it_behaves_like 'LFS http expected response code and message' do
- let(:response_code) { 404 }
+ let(:response_code) { :not_found }
end
end
RSpec.shared_examples 'LFS http expected response code and message' do
let(:response_code) { }
- let(:message) { }
+ let(:response_headers) { {} }
+ let(:content_type) { LfsRequest::CONTENT_TYPE }
+ let(:message) {}
- it 'responds with the expected response code and message' do
+ specify do
expect(response).to have_gitlab_http_status(response_code)
+ expect(response.headers.to_hash).to include(response_headers)
+ expect(response.media_type).to match(content_type)
expect(json_response['message']).to eq(message) if message
end
end
+
+RSpec.shared_examples 'LFS http requests' do
+ include LfsHttpHelpers
+
+ let(:authorize_guest) {}
+ let(:authorize_download) {}
+ let(:authorize_upload) {}
+
+ let(:lfs_object) { create(:lfs_object, :with_file) }
+ let(:sample_oid) { lfs_object.oid }
+
+ let(:authorization) { authorize_user }
+ let(:headers) do
+ {
+ 'Authorization' => authorization,
+ 'X-Sendfile-Type' => 'X-Sendfile'
+ }
+ end
+
+ let(:request_download) do
+ get objects_url(container, sample_oid), params: {}, headers: headers
+ end
+
+ let(:request_upload) do
+ post_lfs_json batch_url(container), upload_body(multiple_objects), headers
+ end
+
+ before do
+ stub_lfs_setting(enabled: true)
+ end
+
+ context 'when LFS is disabled globally' do
+ before do
+ stub_lfs_setting(enabled: false)
+ end
+
+ describe 'download request' do
+ before do
+ request_download
+ end
+
+ it_behaves_like 'LFS http 501 response'
+ end
+
+ describe 'upload request' do
+ before do
+ request_upload
+ end
+
+ it_behaves_like 'LFS http 501 response'
+ end
+ end
+
+ context 'unauthenticated' do
+ let(:headers) { {} }
+
+ describe 'download request' do
+ before do
+ request_download
+ end
+
+ it_behaves_like 'LFS http 401 response'
+ end
+
+ describe 'upload request' do
+ before do
+ request_upload
+ end
+
+ it_behaves_like 'LFS http 401 response'
+ end
+ end
+
+ context 'without access' do
+ describe 'download request' do
+ before do
+ request_download
+ end
+
+ it_behaves_like 'LFS http 404 response'
+ end
+
+ describe 'upload request' do
+ before do
+ request_upload
+ end
+
+ it_behaves_like 'LFS http 404 response'
+ end
+ end
+
+ context 'with guest access' do
+ before do
+ authorize_guest
+ end
+
+ describe 'download request' do
+ before do
+ request_download
+ end
+
+ it_behaves_like 'LFS http 404 response'
+ end
+
+ describe 'upload request' do
+ before do
+ request_upload
+ end
+
+ it_behaves_like 'LFS http 404 response'
+ end
+ end
+
+ context 'with download permission' do
+ before do
+ authorize_download
+ end
+
+ describe 'download request' do
+ before do
+ request_download
+ end
+
+ it_behaves_like 'LFS http 200 blob response'
+
+ context 'when container does not exist' do
+ def objects_url(*args)
+ super.sub(container.full_path, 'missing/path')
+ end
+
+ it_behaves_like 'LFS http 404 response'
+ end
+ end
+
+ describe 'upload request' do
+ before do
+ request_upload
+ end
+
+ it_behaves_like 'LFS http 403 response'
+ end
+ end
+
+ context 'with upload permission' do
+ before do
+ authorize_upload
+ end
+
+ describe 'upload request' do
+ before do
+ request_upload
+ end
+
+ it_behaves_like 'LFS http 200 response'
+ end
+ end
+
+ describe 'deprecated API' do
+ shared_examples 'deprecated request' do
+ before do
+ request
+ end
+
+ it_behaves_like 'LFS http expected response code and message' do
+        let(:response_code) { :not_implemented }
+ let(:message) { 'Server supports batch API only, please update your Git LFS client to version 1.0.1 and up.' }
+ end
+ end
+
+ context 'when fetching LFS object using deprecated API' do
+ subject(:request) do
+ get deprecated_objects_url(container, sample_oid), params: {}, headers: headers
+ end
+
+ it_behaves_like 'deprecated request'
+ end
+
+ context 'when handling LFS request using deprecated API' do
+ subject(:request) do
+ post_lfs_json deprecated_objects_url(container), nil, headers
+ end
+
+ it_behaves_like 'deprecated request'
+ end
+
+ def deprecated_objects_url(container, oid = nil)
+ File.join(["#{container.http_url_to_repo}/info/lfs/objects/", oid].compact)
+ end
+ end
+end
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index 6299fd0cf36..58912eab51e 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe 'search/_results' do
let_it_be(:wiki_blob) { create(:wiki_page, project: project, content: '*') }
let_it_be(:user) { create(:admin) }
- %w[issues merge_requests].each do |search_scope|
+ %w[issues blobs notes wiki_blobs merge_requests milestones].each do |search_scope|
context "when scope is #{search_scope}" do
let(:scope) { search_scope }
let(:search_objects) { Gitlab::ProjectSearchResults.new(user, '*', project: project).objects(scope) }
@@ -55,30 +55,16 @@ RSpec.describe 'search/_results' do
expect(rendered).to have_selector('[data-track-property=search_result]')
end
- it 'does render the sidebar' do
+      it 'renders the state filter dropdown' do
render
- expect(rendered).to have_selector('#js-search-sidebar')
- end
- end
- end
-
- %w[blobs notes wiki_blobs milestones].each do |search_scope|
- context "when scope is #{search_scope}" do
- let(:scope) { search_scope }
- let(:search_objects) { Gitlab::ProjectSearchResults.new(user, '*', project: project).objects(scope) }
-
- it 'renders the click text event tracking attributes' do
- render
-
- expect(rendered).to have_selector('[data-track-event=click_text]')
- expect(rendered).to have_selector('[data-track-property=search_result]')
+ expect(rendered).to have_selector('#js-search-filter-by-state')
end
- it 'does not render the sidebar' do
+      it 'renders the confidential filter dropdown' do
render
- expect(rendered).not_to have_selector('#js-search-sidebar')
+ expect(rendered).to have_selector('#js-search-filter-by-confidential')
end
end
end