diff --git a/app/assets/javascripts/feature_flags/components/edit_feature_flag.vue b/app/assets/javascripts/feature_flags/components/edit_feature_flag.vue index 210212fa900..b1e60066e11 100644 --- a/app/assets/javascripts/feature_flags/components/edit_feature_flag.vue +++ b/app/assets/javascripts/feature_flags/components/edit_feature_flag.vue @@ -91,9 +91,9 @@ export default {

{{ title }}

-
+

{{ message }}

-
+ +import { GlAlert } from '@gitlab/ui'; import { mapState, mapActions } from 'vuex'; import axios from '~/lib/utils/axios_utils'; import FeatureFlagForm from './form.vue'; @@ -10,6 +11,7 @@ import featureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin'; export default { components: { FeatureFlagForm, + GlAlert, }, mixins: [featureFlagsMixin()], inject: { @@ -61,9 +63,9 @@ export default {

{{ s__('FeatureFlags|New feature flag') }}

-
-

{{ message }}

-
+ +

{{ message }}

+
+import { GlSorting, GlSortingItem, GlFilteredSearch } from '@gitlab/ui'; +import { mapState, mapActions } from 'vuex'; +import { __, s__ } from '~/locale'; +import PackageTypeToken from './tokens/package_type_token.vue'; +import { ASCENDING_ODER, DESCENDING_ORDER } from '../constants'; +import getTableHeaders from '../utils'; + +export default { + components: { + GlSorting, + GlSortingItem, + GlFilteredSearch, + }, + computed: { + ...mapState({ + isGroupPage: (state) => state.config.isGroupPage, + orderBy: (state) => state.sorting.orderBy, + sort: (state) => state.sorting.sort, + filter: (state) => state.filter, + }), + internalFilter: { + get() { + return this.filter; + }, + set(value) { + this.setFilter(value); + }, + }, + sortText() { + const field = this.sortableFields.find((s) => s.orderBy === this.orderBy); + return field ? field.label : ''; + }, + sortableFields() { + return getTableHeaders(this.isGroupPage); + }, + isSortAscending() { + return this.sort === ASCENDING_ODER; + }, + tokens() { + return [ + { + type: 'type', + icon: 'package', + title: s__('PackageRegistry|Type'), + unique: true, + token: PackageTypeToken, + operators: [{ value: '=', description: __('is'), default: 'true' }], + }, + ]; + }, + }, + methods: { + ...mapActions(['setSorting', 'setFilter']), + onDirectionChange() { + const sort = this.isSortAscending ? DESCENDING_ORDER : ASCENDING_ODER; + this.setSorting({ sort }); + this.$emit('sort:changed'); + }, + onSortItemClick(item) { + this.setSorting({ orderBy: item }); + this.$emit('sort:changed'); + }, + clearSearch() { + this.setFilter([]); + this.$emit('filter:changed'); + }, + }, +}; + + + diff --git a/app/assets/javascripts/packages/list/components/packages_filter.vue b/app/assets/javascripts/packages/list/components/packages_filter.vue deleted file mode 100644 index 17398071217..00000000000 --- a/app/assets/javascripts/packages/list/components/packages_filter.vue +++ /dev/null @@ -1,21 +0,0 @@ - - - diff --git a/app/assets/javascripts/packages/list/components/packages_list_app.vue b/app/assets/javascripts/packages/list/components/packages_list_app.vue index 2a786b92515..902547cd9e1 100644 --- a/app/assets/javascripts/packages/list/components/packages_list_app.vue +++ b/app/assets/javascripts/packages/list/components/packages_list_app.vue @@ -1,39 +1,43 @@ - - diff --git a/app/assets/javascripts/packages/list/components/tokens/package_type_token.vue b/app/assets/javascripts/packages/list/components/tokens/package_type_token.vue new file mode 100644 index 00000000000..74b6774712e --- /dev/null +++ b/app/assets/javascripts/packages/list/components/tokens/package_type_token.vue @@ -0,0 +1,26 @@ + + + diff --git a/app/assets/javascripts/packages/list/constants.js b/app/assets/javascripts/packages/list/constants.js index e14696e0d1c..9c78778d9f8 100644 --- a/app/assets/javascripts/packages/list/constants.js +++ b/app/assets/javascripts/packages/list/constants.js @@ -55,11 +55,7 @@ export const SORT_FIELDS = [ }, ]; -export const PACKAGE_REGISTRY_TABS = [ - { - title: __('All'), - type: null, - }, +export const PACKAGE_TYPES = [ { title: s__('PackageRegistry|Composer'), type: PackageType.COMPOSER, diff --git a/app/assets/javascripts/packages/list/stores/actions.js b/app/assets/javascripts/packages/list/stores/actions.js index bbc11e3cf13..055a3984e0a 100644 --- a/app/assets/javascripts/packages/list/stores/actions.js +++ b/app/assets/javascripts/packages/list/stores/actions.js @@ -15,7 +15,6 @@ import { getNewPaginationPage } from '../utils'; export const 
setInitialState = ({ commit }, data) => commit(types.SET_INITIAL_STATE, data); export const setLoading = ({ commit }, data) => commit(types.SET_MAIN_LOADING, data); export const setSorting = ({ commit }, data) => commit(types.SET_SORTING, data); -export const setSelectedType = ({ commit }, data) => commit(types.SET_SELECTED_TYPE, data); export const setFilter = ({ commit }, data) => commit(types.SET_FILTER, data); export const receivePackagesListSuccess = ({ commit }, { data, headers }) => { @@ -29,9 +28,9 @@ export const requestPackagesList = ({ dispatch, state }, params = {}) => { const { page = DEFAULT_PAGE, per_page = DEFAULT_PAGE_SIZE } = params; const { sort, orderBy } = state.sorting; - const type = state.selectedType?.type?.toLowerCase(); - const nameFilter = state.filterQuery?.toLowerCase(); - const packageFilters = { package_type: type, package_name: nameFilter }; + const type = state.filter.find((f) => f.type === 'type'); + const name = state.filter.find((f) => f.type === 'filtered-search-term'); + const packageFilters = { package_type: type?.value?.data, package_name: name?.value?.data }; const apiMethod = state.config.isGroupPage ? 'groupPackages' : 'projectPackages'; diff --git a/app/assets/javascripts/packages/list/stores/mutation_types.js b/app/assets/javascripts/packages/list/stores/mutation_types.js index a5a584ccf1f..561ad97f7e3 100644 --- a/app/assets/javascripts/packages/list/stores/mutation_types.js +++ b/app/assets/javascripts/packages/list/stores/mutation_types.js @@ -4,5 +4,4 @@ export const SET_PACKAGE_LIST_SUCCESS = 'SET_PACKAGE_LIST_SUCCESS'; export const SET_PAGINATION = 'SET_PAGINATION'; export const SET_MAIN_LOADING = 'SET_MAIN_LOADING'; export const SET_SORTING = 'SET_SORTING'; -export const SET_SELECTED_TYPE = 'SET_SELECTED_TYPE'; export const SET_FILTER = 'SET_FILTER'; diff --git a/app/assets/javascripts/packages/list/stores/mutations.js b/app/assets/javascripts/packages/list/stores/mutations.js index 2fe7981b3d9..aad6c101525 100644 --- a/app/assets/javascripts/packages/list/stores/mutations.js +++ b/app/assets/javascripts/packages/list/stores/mutations.js @@ -28,11 +28,7 @@ export default { state.sorting = { ...state.sorting, ...sorting }; }, - [types.SET_SELECTED_TYPE](state, type) { - state.selectedType = type; - }, - - [types.SET_FILTER](state, query) { - state.filterQuery = query; + [types.SET_FILTER](state, filter) { + state.filter = filter; }, }; diff --git a/app/assets/javascripts/packages/list/stores/state.js b/app/assets/javascripts/packages/list/stores/state.js index 18ab2390b87..60f02eddc9f 100644 --- a/app/assets/javascripts/packages/list/stores/state.js +++ b/app/assets/javascripts/packages/list/stores/state.js @@ -1,5 +1,3 @@ -import { PACKAGE_REGISTRY_TABS } from '../constants'; - export default () => ({ /** * Determine if the component is loading data from the API @@ -49,9 +47,8 @@ export default () => ({ /** * The search query that is used to filter packages by name */ - filterQuery: '', + filter: [], /** * The selected TAB of the package types tabs */ - selectedType: PACKAGE_REGISTRY_TABS[0], }); diff --git a/app/assets/javascripts/reports/codequality_report/grouped_codequality_reports_app.vue b/app/assets/javascripts/reports/codequality_report/grouped_codequality_reports_app.vue index 5c8f31d7da0..42c6df44b5d 100644 --- a/app/assets/javascripts/reports/codequality_report/grouped_codequality_reports_app.vue +++ b/app/assets/javascripts/reports/codequality_report/grouped_codequality_reports_app.vue @@ -2,6 +2,7 @@ import { mapState, 
mapActions, mapGetters } from 'vuex'; import { componentNames } from '~/reports/components/issue_body'; import { s__, sprintf } from '~/locale'; +import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin'; import ReportSection from '~/reports/components/report_section.vue'; import createStore from './store'; @@ -11,6 +12,7 @@ export default { components: { ReportSection, }, + mixins: [glFeatureFlagsMixin()], props: { headPath: { type: String, @@ -30,6 +32,11 @@ export default { required: false, default: null, }, + codequalityReportsPath: { + type: String, + required: false, + default: '', + }, codequalityHelpPath: { type: String, required: true, @@ -37,7 +44,7 @@ export default { }, componentNames, computed: { - ...mapState(['newIssues', 'resolvedIssues']), + ...mapState(['newIssues', 'resolvedIssues', 'hasError', 'statusReason']), ...mapGetters([ 'hasCodequalityIssues', 'codequalityStatus', @@ -51,10 +58,11 @@ export default { headPath: this.headPath, baseBlobPath: this.baseBlobPath, headBlobPath: this.headBlobPath, + reportsPath: this.codequalityReportsPath, helpPath: this.codequalityHelpPath, }); - this.fetchReports(); + this.fetchReports(this.glFeatures.codequalityMrDiff); }, methods: { ...mapActions(['fetchReports', 'setPaths']), @@ -80,5 +88,7 @@ export default { :popover-options="codequalityPopover" :show-report-section-status-icon="false" class="js-codequality-widget mr-widget-border-top mr-report" - /> + > + + diff --git a/app/assets/javascripts/reports/codequality_report/store/actions.js b/app/assets/javascripts/reports/codequality_report/store/actions.js index e5fb5caca2e..ddd1747899f 100644 --- a/app/assets/javascripts/reports/codequality_report/store/actions.js +++ b/app/assets/javascripts/reports/codequality_report/store/actions.js @@ -4,9 +4,20 @@ import { parseCodeclimateMetrics, doCodeClimateComparison } from './utils/codequ export const setPaths = ({ commit }, paths) => commit(types.SET_PATHS, paths); -export const fetchReports = ({ state, dispatch, commit }) => { +export const fetchReports = ({ state, dispatch, commit }, diffFeatureFlagEnabled) => { commit(types.REQUEST_REPORTS); + if (diffFeatureFlagEnabled) { + return axios + .get(state.reportsPath) + .then(({ data }) => { + return dispatch('receiveReportsSuccess', { + newIssues: parseCodeclimateMetrics(data.new_errors, state.headBlobPath), + resolvedIssues: parseCodeclimateMetrics(data.resolved_errors, state.baseBlobPath), + }); + }) + .catch((error) => dispatch('receiveReportsError', error)); + } if (!state.basePath) { return dispatch('receiveReportsError'); } @@ -18,13 +29,13 @@ export const fetchReports = ({ state, dispatch, commit }) => { ), ) .then((data) => dispatch('receiveReportsSuccess', data)) - .catch(() => dispatch('receiveReportsError')); + .catch((error) => dispatch('receiveReportsError', error)); }; export const receiveReportsSuccess = ({ commit }, data) => { commit(types.RECEIVE_REPORTS_SUCCESS, data); }; -export const receiveReportsError = ({ commit }) => { - commit(types.RECEIVE_REPORTS_ERROR); +export const receiveReportsError = ({ commit }, error) => { + commit(types.RECEIVE_REPORTS_ERROR, error); }; diff --git a/app/assets/javascripts/reports/codequality_report/store/mutations.js b/app/assets/javascripts/reports/codequality_report/store/mutations.js index 7ef4f3ce2db..095e6637966 100644 --- a/app/assets/javascripts/reports/codequality_report/store/mutations.js +++ b/app/assets/javascripts/reports/codequality_report/store/mutations.js @@ -6,6 +6,7 @@ export default { 
state.headPath = paths.headPath; state.baseBlobPath = paths.baseBlobPath; state.headBlobPath = paths.headBlobPath; + state.reportsPath = paths.reportsPath; state.helpPath = paths.helpPath; }, [types.REQUEST_REPORTS](state) { @@ -13,12 +14,14 @@ export default { }, [types.RECEIVE_REPORTS_SUCCESS](state, data) { state.hasError = false; + state.statusReason = ''; state.isLoading = false; state.newIssues = data.newIssues; state.resolvedIssues = data.resolvedIssues; }, - [types.RECEIVE_REPORTS_ERROR](state) { + [types.RECEIVE_REPORTS_ERROR](state, error) { state.isLoading = false; state.hasError = true; + state.statusReason = error?.response?.data?.status_reason; }, }; diff --git a/app/assets/javascripts/reports/codequality_report/store/state.js b/app/assets/javascripts/reports/codequality_report/store/state.js index 38ab53b432e..b39ff4f9d66 100644 --- a/app/assets/javascripts/reports/codequality_report/store/state.js +++ b/app/assets/javascripts/reports/codequality_report/store/state.js @@ -1,12 +1,14 @@ export default () => ({ basePath: null, headPath: null, + reportsPath: null, baseBlobPath: null, headBlobPath: null, isLoading: false, hasError: false, + statusReason: '', newIssues: [], resolvedIssues: [], diff --git a/app/assets/javascripts/reports/codequality_report/store/utils/codequality_comparison.js b/app/assets/javascripts/reports/codequality_report/store/utils/codequality_comparison.js index fd775f52f7d..b252c8c9817 100644 --- a/app/assets/javascripts/reports/codequality_report/store/utils/codequality_comparison.js +++ b/app/assets/javascripts/reports/codequality_report/store/utils/codequality_comparison.js @@ -3,8 +3,10 @@ import CodeQualityComparisonWorker from '../../workers/codequality_comparison_wo export const parseCodeclimateMetrics = (issues = [], path = '') => { return issues.map((issue) => { const parsedIssue = { - ...issue, name: issue.description, + path: issue.file_path, + urlPath: `${path}/${issue.file_path}#L${issue.line}`, + ...issue, }; if (issue?.location?.path) { diff --git a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue index 99105a6eb62..741ccca9918 100644 --- a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue +++ b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue @@ -464,6 +464,7 @@ export default { :head-path="mr.codeclimate.head_path" :head-blob-path="mr.headBlobPath" :base-blob-path="mr.baseBlobPath" + :codequality-reports-path="mr.codequalityReportsPath" :codequality-help-path="mr.codequalityHelpPath" /> diff --git a/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js b/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js index a6bbab47a06..885bc46dfb6 100644 --- a/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js +++ b/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js @@ -241,10 +241,11 @@ export default class MergeRequestStore { this.isDismissedSuggestPipeline = data.is_dismissed_suggest_pipeline; this.securityReportsDocsPath = data.security_reports_docs_path; - // codeclimate + // code quality const blobPath = data.blob_path || {}; this.headBlobPath = blobPath.head_path || ''; this.baseBlobPath = blobPath.base_path || ''; + this.codequalityReportsPath = data.codequality_reports_path; this.codequalityHelpPath = data.codequality_help_path; this.codeclimate = data.codeclimate; diff --git 
a/app/finders/merge_requests/oldest_per_commit_finder.rb b/app/finders/merge_requests/oldest_per_commit_finder.rb new file mode 100644 index 00000000000..f50db43d7d2 --- /dev/null +++ b/app/finders/merge_requests/oldest_per_commit_finder.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +module MergeRequests + # OldestPerCommitFinder is used to retrieve the oldest merge requests for + # every given commit, grouped per commit SHA. + # + # This finder is useful when you need to efficiently retrieve the first/oldest + # merge requests for multiple commits, and you want to do so in batches; + # instead of running a query for every commit. + class OldestPerCommitFinder + def initialize(project) + @project = project + end + + # Returns a Hash that maps a commit ID to the oldest merge request that + # introduced that commit. + def execute(commits) + id_rows = MergeRequestDiffCommit + .oldest_merge_request_id_per_commit(@project.id, commits.map(&:id)) + + mrs = MergeRequest + .preload_target_project + .id_in(id_rows.map { |r| r[:merge_request_id] }) + .index_by(&:id) + + id_rows.each_with_object({}) do |row, hash| + if (mr = mrs[row[:merge_request_id]]) + hash[row[:sha]] = mr + end + end + end + end +end diff --git a/app/helpers/avatars_helper.rb b/app/helpers/avatars_helper.rb index 5457f96d506..eae65f04cbf 100644 --- a/app/helpers/avatars_helper.rb +++ b/app/helpers/avatars_helper.rb @@ -31,11 +31,10 @@ module AvatarsHelper end def avatar_icon_for_user(user = nil, size = nil, scale = 2, only_path: true) - if user - user.avatar_url(size: size, only_path: only_path) || default_avatar - else - gravatar_icon(nil, size, scale) - end + return gravatar_icon(nil, size, scale) unless user + return default_avatar if user.blocked? + + user.avatar_url(size: size, only_path: only_path) || default_avatar end def gravatar_icon(user_email = '', size = nil, scale = 2) diff --git a/app/models/merge_request_diff_commit.rb b/app/models/merge_request_diff_commit.rb index aacc9a92120..259690ef308 100644 --- a/app/models/merge_request_diff_commit.rb +++ b/app/models/merge_request_diff_commit.rb @@ -35,4 +35,23 @@ class MergeRequestDiffCommit < ApplicationRecord Gitlab::Database.bulk_insert(self.table_name, rows) # rubocop:disable Gitlab/BulkInsert end + + def self.oldest_merge_request_id_per_commit(project_id, shas) + # This method is defined here and not on MergeRequest, otherwise the SHA + # values used in the WHERE below won't be encoded correctly. + select(['merge_request_diff_commits.sha AS sha', 'min(merge_requests.id) AS merge_request_id']) + .joins(:merge_request_diff) + .joins( + 'INNER JOIN merge_requests ' \ + 'ON merge_requests.latest_merge_request_diff_id = merge_request_diffs.id' + ) + .where(sha: shas) + .where( + merge_requests: { + target_project_id: project_id, + state_id: MergeRequest.available_states[:merged] + } + ) + .group(:sha) + end end diff --git a/app/services/repositories/changelog_service.rb b/app/services/repositories/changelog_service.rb new file mode 100644 index 00000000000..f30b64b9b32 --- /dev/null +++ b/app/services/repositories/changelog_service.rb @@ -0,0 +1,99 @@ +# frozen_string_literal: true + +module Repositories + # A service class for generating a changelog section. + class ChangelogService + DEFAULT_TRAILER = 'Changelog' + DEFAULT_FILE = 'CHANGELOG.md' + + # The `project` specifies the `Project` to generate the changelog section + # for. + # + # The `user` argument specifies a `User` to use for committing the changes + # to the Git repository. 
+ # + # The `version` argument must be a version `String` using semantic + # versioning as the format. + # + # The arguments `from` and `to` must specify a Git ref or SHA to use for + # fetching the commits to include in the changelog. The SHA/ref set in the + # `from` argument isn't included in the list. + # + # The `date` argument specifies the date of the release, and defaults to the + # current time/date. + # + # The `branch` argument specifies the branch to commit the changes to. The + # branch must already exist. + # + # The `trailer` argument is the Git trailer to use for determining what + # commits to include in the changelog. + # + # The `file` argument specifies the name/path of the file to commit the + # changes to. If the file doesn't exist, it's created automatically. + # + # The `message` argument specifies the commit message to use when committing + # the changelog changes. + # + # rubocop: disable Metrics/ParameterLists + def initialize( + project, + user, + version:, + from:, + to:, + date: DateTime.now, + branch: project.default_branch_or_master, + trailer: DEFAULT_TRAILER, + file: DEFAULT_FILE, + message: "Add changelog for version #{version}" + ) + @project = project + @user = user + @version = version + @from = from + @to = to + @date = date + @branch = branch + @trailer = trailer + @file = file + @message = message + end + # rubocop: enable Metrics/ParameterLists + + def execute + # For every entry we want to only include the merge request that + # originally introduced the commit, which is the oldest merge request that + # contains the commit. We fetch these merge requests in batches, reducing + # the number of SQL queries needed to get this data. + mrs_finder = MergeRequests::OldestPerCommitFinder.new(@project) + config = Gitlab::Changelog::Config.from_git(@project) + release = Gitlab::Changelog::Release + .new(version: @version, date: @date, config: config) + + commits = + CommitsWithTrailerFinder.new(project: @project, from: @from, to: @to) + + commits.each_page(@trailer) do |page| + mrs = mrs_finder.execute(page) + + # Preload the authors. This ensures we only need a single SQL query per + # batch of commits, instead of needing a query for every commit.
+ page.each(&:lazy_author) + + page.each do |commit| + release.add_entry( + title: commit.title, + commit: commit, + category: commit.trailers.fetch(@trailer), + author: commit.author, + merge_request: mrs[commit.id] + ) + end + end + + Gitlab::Changelog::Committer + .new(@project, @user) + .commit(release: release, file: @file, branch: @branch, message: @message) + end + end +end diff --git a/changelogs/unreleased/227349-package-registry-expand-the-filter-and-sort-functionality-on-the-p.yml b/changelogs/unreleased/227349-package-registry-expand-the-filter-and-sort-functionality-on-the-p.yml new file mode 100644 index 00000000000..043bb4531e0 --- /dev/null +++ b/changelogs/unreleased/227349-package-registry-expand-the-filter-and-sort-functionality-on-the-p.yml @@ -0,0 +1,5 @@ +--- +title: Redesign the search UI for the package list +merge_request: 52575 +author: +type: changed diff --git a/changelogs/unreleased/239031-yo-remove-avatar-blocked-user.yml b/changelogs/unreleased/239031-yo-remove-avatar-blocked-user.yml new file mode 100644 index 00000000000..888e3bd28c3 --- /dev/null +++ b/changelogs/unreleased/239031-yo-remove-avatar-blocked-user.yml @@ -0,0 +1,5 @@ +--- +title: Remove avatar of the blocked user +merge_request: 52051 +author: Yogi (@yo) +type: fixed diff --git a/changelogs/unreleased/changelog-api.yml b/changelogs/unreleased/changelog-api.yml new file mode 100644 index 00000000000..c6ab3248996 --- /dev/null +++ b/changelogs/unreleased/changelog-api.yml @@ -0,0 +1,5 @@ +--- +title: Add API for generating Markdown changelogs +merge_request: 52116 +author: +type: added diff --git a/changelogs/unreleased/update-feature-flag-error-banner.yml b/changelogs/unreleased/update-feature-flag-error-banner.yml new file mode 100644 index 00000000000..97921fa2db4 --- /dev/null +++ b/changelogs/unreleased/update-feature-flag-error-banner.yml @@ -0,0 +1,5 @@ +--- +title: Update styling of validation messages in New Feature Flag form +merge_request: 52217 +author: +type: changed diff --git a/changelogs/unreleased/yo-gl-button-promotion.yml b/changelogs/unreleased/yo-gl-button-promotion.yml new file mode 100644 index 00000000000..a7d1e131a64 --- /dev/null +++ b/changelogs/unreleased/yo-gl-button-promotion.yml @@ -0,0 +1,5 @@ +--- +title: Add `gl-button` to promotion buttons on issue sidebar +merge_request: 51287 +author: Yogi (@yo) +type: other diff --git a/config/feature_flags/development/changelog_api.yml b/config/feature_flags/development/changelog_api.yml new file mode 100644 index 00000000000..1c90f05a0ed --- /dev/null +++ b/config/feature_flags/development/changelog_api.yml @@ -0,0 +1,8 @@ +--- +name: changelog_api +introduced_by_url: '13.9' +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/300043 +milestone: '13.9' +type: development +group: group::source code +default_enabled: false diff --git a/config/object_store_settings.rb b/config/object_store_settings.rb index 9f5323426d9..5603ea51672 100644 --- a/config/object_store_settings.rb +++ b/config/object_store_settings.rb @@ -78,7 +78,7 @@ class ObjectStoreSettings # "background_upload" => false, # "proxy_download" => false, # "remote_directory" => "artifacts" - # } + # } # # Settings.lfs['object_store'] = { # "enabled" => true, @@ -97,7 +97,7 @@ class ObjectStoreSettings # "background_upload" => false, # "proxy_download" => true, # "remote_directory" => "lfs-objects" - # } + # } # # Note that with the common config: # 1. Only one object store credentials can now be used. 
This is @@ -124,9 +124,9 @@ class ObjectStoreSettings target_config = common_config.merge(overrides.slice(*ALLOWED_OBJECT_STORE_OVERRIDES)) section = settings.try(store_type) - next unless section + next unless uses_object_storage?(section) - if section['enabled'] && target_config['bucket'].blank? + if target_config['bucket'].blank? missing_bucket_for(store_type) next end @@ -140,10 +140,26 @@ class ObjectStoreSettings target_config['consolidated_settings'] = true section['object_store'] = target_config end + + settings end private + # Admins can selectively disable object storage for a specific type. If + # this hasn't been set, we assume that the consolidated settings + # should be used. + def uses_object_storage?(section) + # Use to_h to avoid https://gitlab.com/gitlab-org/gitlab/-/issues/286873 + section = section.to_h + + enabled_globally = section.fetch('enabled', false) + object_store_settings = section.fetch('object_store', {}) + os_enabled = object_store_settings.fetch('enabled', true) + + enabled_globally && os_enabled + end + # We only can use the common object storage settings if: # 1. The common settings are defined # 2. The legacy settings are not defined @@ -152,8 +168,9 @@ class ObjectStoreSettings return false unless settings.dig('object_store', 'connection').present? WORKHORSE_ACCELERATED_TYPES.each do |store| - # to_h is needed because something strange happens to - # Settingslogic#dig when stub_storage_settings is run in tests: + # to_h is needed because we define `default` as a Gitaly storage name + # in stub_storage_settings. This causes Settingslogic to redefine Hash#default, + # which causes Hash#dig to fail when the key doesn't exist: https://gitlab.com/gitlab-org/gitlab/-/issues/286873 # # (byebug) section.dig # *** ArgumentError Exception: wrong number of arguments (given 0, expected 1+) diff --git a/db/migrate/20210127143025_add_oldest_merge_requests_index.rb b/db/migrate/20210127143025_add_oldest_merge_requests_index.rb new file mode 100644 index 00000000000..acd690a5cce --- /dev/null +++ b/db/migrate/20210127143025_add_oldest_merge_requests_index.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +class AddOldestMergeRequestsIndex < ActiveRecord::Migration[6.0] + include Gitlab::Database::SchemaHelpers + include Gitlab::Database::MigrationHelpers + + disable_ddl_transaction! + + # Set this constant to true if this migration requires downtime. 
+ DOWNTIME = false + + INDEX = 'index_on_merge_requests_for_latest_diffs' + + def up + return if index_exists_by_name?('merge_requests', INDEX) + + execute "CREATE INDEX CONCURRENTLY #{INDEX} ON merge_requests " \ + 'USING btree (target_project_id) INCLUDE (id, latest_merge_request_diff_id)' + + create_comment( + 'INDEX', + INDEX, + 'Index used to efficiently obtain the oldest merge request for a commit SHA' + ) + end + + def down + return unless index_exists_by_name?('merge_requests', INDEX) + + execute "DROP INDEX CONCURRENTLY #{INDEX}" + end +end diff --git a/db/schema_migrations/20210127143025 b/db/schema_migrations/20210127143025 new file mode 100644 index 00000000000..a4875304c0b --- /dev/null +++ b/db/schema_migrations/20210127143025 @@ -0,0 +1 @@ +c173ba86340efe39977f1b319d1ebcead634e3bfe819a30e230fb4f81766f28a \ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index c7c6fae7a10..9dfe8d480d9 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -22487,6 +22487,10 @@ CREATE UNIQUE INDEX index_on_instance_statistics_recorded_at_and_identifier ON a CREATE INDEX index_on_label_links_all_columns ON label_links USING btree (target_id, label_id, target_type); +CREATE INDEX index_on_merge_requests_for_latest_diffs ON merge_requests USING btree (target_project_id) INCLUDE (id, latest_merge_request_diff_id); + +COMMENT ON INDEX index_on_merge_requests_for_latest_diffs IS 'Index used to efficiently obtain the oldest merge request for a commit SHA'; + CREATE INDEX index_on_namespaces_lower_name ON namespaces USING btree (lower((name)::text)); CREATE INDEX index_on_namespaces_lower_path ON namespaces USING btree (lower((path)::text)); diff --git a/doc/.vale/gitlab/spelling-exceptions.txt b/doc/.vale/gitlab/spelling-exceptions.txt index 6448fd13979..1e379236395 100644 --- a/doc/.vale/gitlab/spelling-exceptions.txt +++ b/doc/.vale/gitlab/spelling-exceptions.txt @@ -2,6 +2,7 @@ Akismet Alertmanager Algolia Alibaba +Aliyun allowlist allowlisted allowlisting @@ -34,6 +35,7 @@ autoscaler autoscales autoscaling awardable +awardables Axios Azure B-tree @@ -111,6 +113,7 @@ Dangerfile datetime Debian Decompressor +decryptable deduplicate deduplicated deduplicates @@ -168,6 +171,7 @@ Figma Filebeat Fio firewalled +firewalling Flawfinder Flowdock Fluentd @@ -272,6 +276,7 @@ libFuzzer Libravatar liveness Lograge +logrotate Logstash lookahead lookaheads @@ -282,6 +287,7 @@ loopback Lucene Maildir Mailgun +Mailroom Makefile Makefiles Markdown @@ -505,6 +511,8 @@ spidering Splunk SpotBugs Stackdriver +starrer +starrers storable storages strace @@ -525,6 +533,8 @@ subnet subnets subnetting subpath +subproject +subprojects subqueried subqueries subquery @@ -534,9 +544,12 @@ substrings subtree subtrees sudo +supercookie +supercookies swappiness swimlane swimlanes +syncable Sysbench syslog tanuki @@ -633,6 +646,9 @@ unresolved unresolving unschedule unscoped +unshare +unshared +unshares unstage unstaged unstages diff --git a/doc/administration/auth/ldap/index.md b/doc/administration/auth/ldap/index.md index 9945d330db0..74621f9c1ba 100644 --- a/doc/administration/auth/ldap/index.md +++ b/doc/administration/auth/ldap/index.md @@ -530,7 +530,12 @@ The process executes the following access checks: In Active Directory, a user is marked as disabled/blocked if the user account control attribute (`userAccountControl:1.2.840.113556.1.4.803`) has bit 2 set. 
-For more information, see + + + +For more information, see [Bitmask Searches in LDAP](https://ctovswild.com/2009/09/03/bitmask-searches-in-ldap/). + + The user is set to an `ldap_blocked` state in GitLab if the previous conditions fail. This means the user is not able to sign in or push/pull code. diff --git a/doc/administration/auth/oidc.md b/doc/administration/auth/oidc.md index 158182edfb5..1ddf75e7c1b 100644 --- a/doc/administration/auth/oidc.md +++ b/doc/administration/auth/oidc.md @@ -175,6 +175,6 @@ If you're having trouble, here are some tips: OAuth2 access token if `client_auth_method` is not defined or if set to `basic`. If you are seeing 401 errors upon retrieving the `userinfo` endpoint, you may want to check your OpenID Web server configuration. For example, for - [oauth2-server-php](https://github.com/bshaffer/oauth2-server-php), you + [`oauth2-server-php`](https://github.com/bshaffer/oauth2-server-php), you may need to [add a configuration parameter to Apache](https://github.com/bshaffer/oauth2-server-php/issues/926#issuecomment-387502778). diff --git a/doc/administration/geo/replication/geo_validation_tests.md b/doc/administration/geo/replication/geo_validation_tests.md index fd4ab48fe53..f050d3e708c 100644 --- a/doc/administration/geo/replication/geo_validation_tests.md +++ b/doc/administration/geo/replication/geo_validation_tests.md @@ -53,7 +53,7 @@ The following are GitLab upgrade validation tests we performed. - Outcome: Partial success because we did not run the looping pipeline during the demo to validate zero-downtime. - Follow up issues: - - [Clarify hup Puma/Unicorn should include deploy node](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5460) + - [Clarify how Puma/Unicorn should include deploy node](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5460) - [Investigate MR creation failure after upgrade to 12.9.10](https://gitlab.com/gitlab-org/gitlab/-/issues/223282) Closed as false positive. ### February 2020 @@ -128,7 +128,7 @@ The following are PostgreSQL upgrade validation tests we performed. PostgreSQL 12 with a database cluster on the primary is not recommended until the issues are resolved. - Known issues for PostgreSQL clusters: - [Ensure Patroni detects PostgreSQL update](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5423) - - [Allow configuring permanent replication slots in patroni](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5628) + - [Allow configuring permanent replication slots in Patroni](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5628) ### August 2020 diff --git a/doc/administration/geo/setup/database.md b/doc/administration/geo/setup/database.md index cbe282c9b10..c9fd94bf2b2 100644 --- a/doc/administration/geo/setup/database.md +++ b/doc/administration/geo/setup/database.md @@ -489,7 +489,7 @@ This experimental implementation has the following limitations: - Whenever `gitlab-ctl reconfigure` runs on a Patroni Leader instance, there's a chance the node will be demoted due to the required short-time restart. To avoid this, you can pause auto-failover by running `gitlab-ctl patroni pause`. - After a reconfigure, it unpauses on its own. + After a reconfigure, it resumes on its own. For instructions about how to set up Patroni on the primary node, see the [PostgreSQL replication and failover with Omnibus GitLab](../../postgresql/replication_and_failover.md#patroni) page. @@ -644,8 +644,8 @@ With Patroni it's now possible to support that. In order to migrate the existing 1. 
[Configure a permanent replication slot](#step-1-configure-patroni-permanent-replication-slot-on-the-primary-site). 1. [Configure a Standby Cluster](#step-2-configure-a-standby-cluster-on-the-secondary-site) on that single node machine. - -You will end up with a "Standby Cluster" with a single node. That allows you to later on add additional patroni nodes + +You will end up with a "Standby Cluster" with a single node. That allows you to later on add additional Patroni nodes by following the same instructions above. ## Troubleshooting diff --git a/doc/administration/gitaly/index.md b/doc/administration/gitaly/index.md index 7491807c501..6738b912dab 100644 --- a/doc/administration/gitaly/index.md +++ b/doc/administration/gitaly/index.md @@ -121,7 +121,7 @@ The following list depicts the network architecture of Gitaly: - GitLab Shell. - Elasticsearch indexer. - Gitaly itself. -- A Gitaly server must be able to make RPC calls **to itself** by uing its own +- A Gitaly server must be able to make RPC calls **to itself** by using its own `(Gitaly address, Gitaly token)` pair as specified in `/config/gitlab.yml`. - Authentication is done through a static token which is shared among the Gitaly and GitLab Rails nodes. diff --git a/doc/administration/gitaly/praefect.md b/doc/administration/gitaly/praefect.md index 035f06fb289..8016a572da2 100644 --- a/doc/administration/gitaly/praefect.md +++ b/doc/administration/gitaly/praefect.md @@ -65,7 +65,7 @@ Gitaly Cluster and [Geo](../geo/index.md) both provide redundancy. However the r not aware when Gitaly Cluster is used. - Geo provides [replication](../geo/index.md) and [disaster recovery](../geo/disaster_recovery/index.md) for an entire instance of GitLab. Users know when they are using Geo for - [replication](../geo/index.md). Geo [replicates multiple datatypes](../geo/replication/datatypes.md#limitations-on-replicationverification), + [replication](../geo/index.md). Geo [replicates multiple data types](../geo/replication/datatypes.md#limitations-on-replicationverification), including Git data. The following table outlines the major differences between Gitaly Cluster and Geo: diff --git a/doc/administration/integration/kroki.md b/doc/administration/integration/kroki.md index af2c86c5ae4..8bf1b18131b 100644 --- a/doc/administration/integration/kroki.md +++ b/doc/administration/integration/kroki.md @@ -24,6 +24,8 @@ The **Kroki URL** is the hostname of the server running the container. The [`yuzutech/kroki`](https://hub.docker.com/r/yuzutech/kroki) image contains the following diagrams libraries out-of-the-box: + + - [Bytefield](https://bytefield-svg.deepsymmetry.org/) - [Ditaa](http://ditaa.sourceforge.net) - [Erd](https://github.com/BurntSushi/erd) @@ -37,6 +39,8 @@ The [`yuzutech/kroki`](https://hub.docker.com/r/yuzutech/kroki) image contains t - [Vega-Lite](https://github.com/vega/vega-lite) - [WaveDrom](https://wavedrom.com/) + + If you want to use additional diagram libraries, read the [Kroki installation](https://docs.kroki.io/kroki/setup/install/#_images) to learn how to start Kroki companion containers. @@ -138,8 +142,12 @@ Rel(banking_system, mainframe, "Uses") ![C4 PlantUML diagram](../img/kroki_c4_diagram.png) + + **Nomnoml** + + ```plaintext [nomnoml] .... @@ -159,4 +167,4 @@ Rel(banking_system, mainframe, "Uses") .... 
``` -![Nomnoml diagram](../img/kroki_nomnoml_diagram.png) +![Diagram](../img/kroki_nomnoml_diagram.png) diff --git a/doc/administration/job_logs.md b/doc/administration/job_logs.md index b2c6864e671..1afadcaf668 100644 --- a/doc/administration/job_logs.md +++ b/doc/administration/job_logs.md @@ -73,7 +73,7 @@ these steps to move the logs to a new location without losing any data. ``` Use `--ignore-existing` so you don't override new job logs with older versions of the same log. -1. Unpause continuous integration data processing by editing `/etc/gitlab/gitlab.rb` and removing the `sidekiq` setting you updated earlier. +1. Resume continuous integration data processing by editing `/etc/gitlab/gitlab.rb` and removing the `sidekiq` setting you updated earlier. 1. Save the file and [reconfigure GitLab](restart_gitlab.md#omnibus-gitlab-reconfigure) for the changes to take effect. 1. Remove the old job logs storage location: diff --git a/doc/administration/logs.md b/doc/administration/logs.md index ac4f536b59f..05e4535294c 100644 --- a/doc/administration/logs.md +++ b/doc/administration/logs.md @@ -970,9 +970,13 @@ For Omnibus GitLab installations, Redis logs reside in `/var/log/gitlab/redis/`. For Omnibus GitLab installations, Alertmanager logs reside in `/var/log/gitlab/alertmanager/`. + + ## Crond Logs -For Omnibus GitLab installations, `crond` logs reside in `/var/log/gitlab/crond/`. +For Omnibus GitLab installations, crond logs reside in `/var/log/gitlab/crond/`. + + ## Grafana Logs @@ -980,7 +984,7 @@ For Omnibus GitLab installations, Grafana logs reside in `/var/log/gitlab/grafan ## LogRotate Logs -For Omnibus GitLab installations, logrotate logs reside in `/var/log/gitlab/logrotate/`. +For Omnibus GitLab installations, `logrotate` logs reside in `/var/log/gitlab/logrotate/`. ## GitLab Monitor Logs @@ -1023,14 +1027,14 @@ GitLab Support often asks for one of these, and maintains the required tools. ### Briefly tail the main logs If the bug or error is readily reproducible, save the main GitLab logs -[to a file](troubleshooting/linux_cheat_sheet.md#files--dirs) while reproducing the +[to a file](troubleshooting/linux_cheat_sheet.md#files-and-directories) while reproducing the problem a few times: ```shell sudo gitlab-ctl tail | tee /tmp/.log ``` -Conclude the log gathering with Ctrl + C. +Conclude the log gathering with Control + C. ### GitLabSOS diff --git a/doc/administration/maintenance_mode/index.md b/doc/administration/maintenance_mode/index.md index b601ccf8d00..ffb89d12872 100644 --- a/doc/administration/maintenance_mode/index.md +++ b/doc/administration/maintenance_mode/index.md @@ -108,7 +108,7 @@ both primary and secondaries will fail. ### Merge requests, issues, epics -All write actions except those mentioned above will fail. So, in maintenace mode, a user cannot update merge requests, issues, etc. +All write actions except those mentioned above will fail. So, in maintenance mode, a user cannot update merge requests, issues, etc. ### Container Registry diff --git a/doc/administration/operations/puma.md b/doc/administration/operations/puma.md index 44ac014650e..3b676010bfe 100644 --- a/doc/administration/operations/puma.md +++ b/doc/administration/operations/puma.md @@ -31,7 +31,7 @@ Unicorn in GitLab 14.0. When switching to Puma, Unicorn server configuration will _not_ carry over automatically, due to differences between the two application servers. 
For Omnibus-based deployments, see [Configuring Puma Settings](https://docs.gitlab.com/omnibus/settings/puma.html#configuring-puma-settings). -For Helm based deployments, see the [Webservice Chart documentation](https://docs.gitlab.com/charts/charts/gitlab/webservice/index.html). +For Helm based deployments, see the [`webservice` chart documentation](https://docs.gitlab.com/charts/charts/gitlab/webservice/index.html). Additionally we strongly recommend that multi-node deployments [configure their load balancers to use the readiness check](../load_balancer.md#readiness-check) due to a difference between Unicorn and Puma in how they handle connections during a restart of the service. diff --git a/doc/administration/packages/container_registry.md b/doc/administration/packages/container_registry.md index ab6202fef4c..63bb969afce 100644 --- a/doc/administration/packages/container_registry.md +++ b/doc/administration/packages/container_registry.md @@ -312,14 +312,14 @@ configuration. The different supported drivers are: -| Driver | Description | -|------------|-------------------------------------| -| filesystem | Uses a path on the local filesystem | -| Azure | Microsoft Azure Blob Storage | -| gcs | Google Cloud Storage | -| s3 | Amazon Simple Storage Service. Be sure to configure your storage bucket with the correct [S3 Permission Scopes](https://docs.docker.com/registry/storage-drivers/s3/#s3-permission-scopes). | -| swift | OpenStack Swift Object Storage | -| oss | Aliyun OSS | +| Driver | Description | +|--------------|--------------------------------------| +| `filesystem` | Uses a path on the local file system | +| `Azure` | Microsoft Azure Blob Storage | +| `gcs` | Google Cloud Storage | +| `s3` | Amazon Simple Storage Service. Be sure to configure your storage bucket with the correct [S3 Permission Scopes](https://docs.docker.com/registry/storage-drivers/s3/#s3-permission-scopes). | +| `swift` | OpenStack Swift Object Storage | +| `oss` | Aliyun OSS | Although most S3 compatible services (like [MinIO](https://min.io/)) should work with the Container Registry, we only guarantee support for AWS S3. Because we cannot assert the correctness of third-party S3 implementations, we can debug issues, but we cannot patch the registry unless an issue is reproducible against an AWS S3 bucket. diff --git a/doc/administration/packages/index.md b/doc/administration/packages/index.md index 0f391371a6a..25fce08d3d2 100644 --- a/doc/administration/packages/index.md +++ b/doc/administration/packages/index.md @@ -35,6 +35,8 @@ The Package Registry supports the following formats: The below table lists formats that are not supported, but are accepting Community contributions for. Consider contributing to GitLab. This [development documentation](../../development/packages.md) guides you through the process. + + | Format | Status | | ------ | ------ | | Chef | [#36889](https://gitlab.com/gitlab-org/gitlab/-/issues/36889) | @@ -51,6 +53,8 @@ guides you through the process. | Terraform | [WIP: Merge Request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/18834) | | Vagrant | [#36899](https://gitlab.com/gitlab-org/gitlab/-/issues/36899) | + + ## Enabling the Packages feature NOTE: diff --git a/doc/administration/pages/index.md b/doc/administration/pages/index.md index f63f11f7268..fb9ea5a9096 100644 --- a/doc/administration/pages/index.md +++ b/doc/administration/pages/index.md @@ -190,7 +190,7 @@ outside world. 
### Additional configuration for Docker container The GitLab Pages daemon doesn't have permissions to bind mounts when it runs -in a Docker container. To overcome this issue, you must change the chroot +in a Docker container. To overcome this issue, you must change the `chroot` behavior: 1. Edit `/etc/gitlab/gitlab.rb`. @@ -236,7 +236,7 @@ control over how the Pages daemon runs and serves content in your environment. | `gitlab_secret` | The OAuth application secret. Leave blank to automatically fill when Pages authenticates with GitLab. | `gitlab_server` | Server to use for authentication when access control is enabled; defaults to GitLab `external_url`. | `headers` | Specify any additional http headers that should be sent to the client with each response. -| `inplace_chroot` | On [systems that don't support bind-mounts](index.md#additional-configuration-for-docker-container), this instructs GitLab Pages to chroot into its `pages_path` directory. Some caveats exist when using inplace chroot; refer to the GitLab Pages [README](https://gitlab.com/gitlab-org/gitlab-pages/blob/master/README.md#caveats) for more information. +| `inplace_chroot` | On [systems that don't support bind-mounts](index.md#additional-configuration-for-docker-container), this instructs GitLab Pages to `chroot` into its `pages_path` directory. Some caveats exist when using in-place `chroot`; refer to the GitLab Pages [README](https://gitlab.com/gitlab-org/gitlab-pages/blob/master/README.md#caveats) for more information. | `insecure_ciphers` | Use default list of cipher suites, may contain insecure ones like 3DES and RC4. | `internal_gitlab_server` | Internal GitLab server address used exclusively for API requests. Useful if you want to send that traffic over an internal load balancer. Defaults to GitLab `external_url`. | `listen_proxy` | The addresses to listen on for reverse-proxy requests. Pages binds to these addresses' network sockets and receives incoming requests from them. Sets the value of `proxy_pass` in `$nginx-dir/conf/gitlab-pages.conf`. @@ -538,7 +538,7 @@ the below steps to do a no downtime transfer to a new storage location. 1. [Reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure). 1. Verify Pages are still being served up as expected. -1. Unpause Pages deployments by removing from `/etc/gitlab/gitlab.rb` the `sidekiq` setting set above. +1. Resume Pages deployments by removing from `/etc/gitlab/gitlab.rb` the `sidekiq` setting set above. 1. [Reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure). 1. Trigger a new Pages deployment and verify it's working as expected. 1. Remove the old Pages storage location: `sudo rm -rf /var/opt/gitlab/gitlab-rails/shared/pages` @@ -629,7 +629,7 @@ database encryption. Proceed with caution. on the **Pages server** and configure this share to allow access from your main **GitLab server**. Note that the example there is more general and - shares several sub-directories from `/home` to several `/nfs/home` mountpoints. + shares several sub-directories from `/home` to several `/nfs/home` mount points. For our Pages-specific example here, we instead share only the default GitLab Pages folder `/var/opt/gitlab/gitlab-rails/shared/pages` from the **Pages server** and we mount it to `/mnt/pages` @@ -818,7 +818,7 @@ but commented out to help encourage others to add to it in the future. 
--> ### `open /etc/ssl/ca-bundle.pem: permission denied` -GitLab Pages runs inside a chroot jail, usually in a uniquely numbered directory like +GitLab Pages runs inside a `chroot` jail, usually in a uniquely numbered directory like `/tmp/gitlab-pages-*`. Within the jail, a bundle of trusted certificates is @@ -828,7 +828,7 @@ from `/opt/gitlab/embedded/ssl/certs/cacert.pem` as part of starting up Pages. If the permissions on the source file are incorrect (they should be `0644`), then -the file inside the chroot jail is also wrong. +the file inside the `chroot` jail is also wrong. Pages logs errors in `/var/log/gitlab/gitlab-pages/current` like: @@ -837,7 +837,7 @@ x509: failed to load system roots and no roots provided open /etc/ssl/ca-bundle.pem: permission denied ``` -The use of a chroot jail makes this error misleading, as it is not +The use of a `chroot` jail makes this error misleading, as it is not referring to `/etc/ssl` on the root filesystem. The fix is to correct the source file permissions and restart Pages: @@ -862,8 +862,8 @@ open /opt/gitlab/embedded/ssl/certs/cacert.pem: no such file or directory x509: certificate signed by unknown authority ``` -The reason for those errors is that the files `resolv.conf` and `ca-bundle.pem` are missing inside the chroot. -The fix is to copy the host's `/etc/resolv.conf` and the GitLab certificate bundle inside the chroot: +The reason for those errors is that the files `resolv.conf` and `ca-bundle.pem` are missing inside the `chroot`. +The fix is to copy the host's `/etc/resolv.conf` and the GitLab certificate bundle inside the `chroot`: ```shell sudo mkdir -p /var/opt/gitlab/gitlab-rails/shared/pages/etc/ssl @@ -895,7 +895,7 @@ gitlab_pages['listen_proxy'] = '127.0.0.1:8090' ### 404 error after transferring project to a different group or user If you encounter a `404 Not Found` error a Pages site after transferring a project to -another group or user, you must trigger adomain configuration update for Pages. To do +another group or user, you must trigger a domain configuration update for Pages. To do so, write something in the `.update` file. The Pages daemon monitors for changes to this file, and reloads the configuration when changes occur. @@ -945,8 +945,8 @@ in all of your GitLab Pages instances. ### 500 error with `securecookie: failed to generate random iv` and `Failed to save the session` This problem most likely results from an [out-dated operating system](https://docs.gitlab.com/omnibus/package-information/deprecated_os.html). -The [Pages daemon uses the `securecookie` library](https://gitlab.com/search?group_id=9970&project_id=734943&repository_ref=master&scope=blobs&search=securecookie&snippets=false) to get random strings via [crypto/rand in Go](https://golang.org/pkg/crypto/rand/#pkg-variables). -This requires the `getrandom` syscall or `/dev/urandom` to be available on the host OS. +The [Pages daemon uses the `securecookie` library](https://gitlab.com/search?group_id=9970&project_id=734943&repository_ref=master&scope=blobs&search=securecookie&snippets=false) to get random strings via [`crypto/rand` in Go](https://golang.org/pkg/crypto/rand/#pkg-variables). +This requires the `getrandom` system call or `/dev/urandom` to be available on the host OS. Upgrading to an [officially supported operating system](https://about.gitlab.com/install/) is recommended. 
### The requested scope is invalid, malformed, or unknown diff --git a/doc/administration/postgresql/replication_and_failover.md b/doc/administration/postgresql/replication_and_failover.md index 99f5edd35be..e80e38fe5d1 100644 --- a/doc/administration/postgresql/replication_and_failover.md +++ b/doc/administration/postgresql/replication_and_failover.md @@ -46,7 +46,7 @@ Each database node runs three services: `PostgreSQL` - The database itself. -`Patroni` - Communicates with other patroni services in the cluster and handles +`Patroni` - Communicates with other Patroni services in the cluster and handles failover when issues with the leader server occurs. The failover procedure consists of: diff --git a/doc/administration/raketasks/ldap.md b/doc/administration/raketasks/ldap.md index 37598fa99d8..531e9e89020 100644 --- a/doc/administration/raketasks/ldap.md +++ b/doc/administration/raketasks/ldap.md @@ -233,9 +233,9 @@ It can also be used as a receiving application for content encrypted with a KMS: gcloud kms decrypt --key my-key --keyring my-test-kms --plaintext-file=- --ciphertext-file=my-file --location=us-west1 | sudo gitlab-rake gitlab:ldap:secret:write ``` -**gcloud secret integration example** +**Google Cloud secret integration example** -It can also be used as a receiving application for secrets out of gcloud: +It can also be used as a receiving application for secrets out of Google Cloud: ```shell gcloud secrets versions access latest --secret="my-test-secret" > $1 | sudo gitlab-rake gitlab:ldap:secret:write diff --git a/doc/administration/restart_gitlab.md b/doc/administration/restart_gitlab.md index 4f104c6a63f..69b3ae5282f 100644 --- a/doc/administration/restart_gitlab.md +++ b/doc/administration/restart_gitlab.md @@ -140,7 +140,7 @@ your server in `/etc/init.d/gitlab`. --- -If you are using other init systems, like systemd, you can check the +If you are using other init systems, like `systemd`, you can check the [GitLab Recipes](https://gitlab.com/gitlab-org/gitlab-recipes/tree/master/init) repository for some unofficial services. These are **not** officially supported so use them at your own risk. diff --git a/doc/administration/troubleshooting/debug.md b/doc/administration/troubleshooting/debug.md index 8c8fa25aa5e..7b57cdbf17f 100644 --- a/doc/administration/troubleshooting/debug.md +++ b/doc/administration/troubleshooting/debug.md @@ -126,7 +126,7 @@ an SMTP server, but you're not seeing mail delivered. Here's how to check the se For more advanced issues, `gdb` is a must-have tool for debugging issues. -### The GNU Project Debugger (gdb) +### The GNU Project Debugger (GDB) To install on Ubuntu/Debian: @@ -140,9 +140,13 @@ On CentOS: sudo yum install gdb ``` + + ### rbtrace -GitLab 11.2 ships with [rbtrace](https://github.com/tmm1/rbtrace), which + + +GitLab 11.2 ships with [`rbtrace`](https://github.com/tmm1/rbtrace), which allows you to trace Ruby code, view all running threads, take memory dumps, and more. However, this is not enabled by default. To enable it, define the `ENABLE_RBTRACE` variable to the environment. For example, in Omnibus: @@ -175,7 +179,7 @@ downtime. Otherwise skip to the next section. 1. Load the problematic URL 1. Run `sudo gdb -p ` to attach to the Unicorn process. -1. In the gdb window, type: +1. In the GDB window, type: ```plaintext call (void) rb_backtrace() @@ -210,7 +214,7 @@ downtime. Otherwise skip to the next section. ``` Note that if the Unicorn process terminates before you are able to run these -commands, gdb will report an error. 
To buy more time, you can always raise the +commands, GDB will report an error. To buy more time, you can always raise the Unicorn timeout. For omnibus users, you can edit `/etc/gitlab/gitlab.rb` and increase it from 60 seconds to 300: @@ -246,7 +250,7 @@ separate Rails process to debug the issue: ``` 1. In a new window, run `top`. It should show this Ruby process using 100% CPU. Write down the PID. -1. Follow step 2 from the previous section on using gdb. +1. Follow step 2 from the previous section on using GDB. ### GitLab: API is not accessible @@ -279,4 +283,4 @@ The output in `/tmp/unicorn.txt` may help diagnose the root cause. ## More information - [Debugging Stuck Ruby Processes](https://blog.newrelic.com/engineering/debugging-stuck-ruby-processes-what-to-do-before-you-kill-9/) -- [Cheatsheet of using gdb and Ruby processes](gdb-stuck-ruby.txt) +- [Cheat sheet of using GDB and Ruby processes](gdb-stuck-ruby.txt) diff --git a/doc/administration/troubleshooting/linux_cheat_sheet.md b/doc/administration/troubleshooting/linux_cheat_sheet.md index c61a78624c3..22069dd571a 100644 --- a/doc/administration/troubleshooting/linux_cheat_sheet.md +++ b/doc/administration/troubleshooting/linux_cheat_sheet.md @@ -55,7 +55,7 @@ chown root:git chmod u+x ``` -### Files & Dirs +### Files and directories ```shell # create a new directory and all subdirectories @@ -202,7 +202,7 @@ or you can build it from source if you have the Rust compiler. First run the tool with no arguments other than the strace output filename to get a summary of the top processes sorted by time spent actively performing tasks. You -can also sort based on total time, # of syscalls made, PID #, and # of child processes +can also sort based on total time, # of system calls made, PID #, and # of child processes using the `-S` or `--sort` flag. The number of results defaults to 25 processes, but can be changed using the `-c`/`--count` option. See `--help` for full details. @@ -220,7 +220,7 @@ Top 25 PIDs ... ``` -Based on the summary, you can then view the details of syscalls made by one or more +Based on the summary, you can then view the details of system calls made by one or more processes using the `-p`/`--pid` for a specific process, or `-s`/`--stats` flags for a sorted list. `--stats` takes the same sorting and count options as summary. diff --git a/doc/api/README.md b/doc/api/README.md index 436893e48d1..e2ec30ce8ad 100644 --- a/doc/api/README.md +++ b/doc/api/README.md @@ -207,7 +207,7 @@ to authenticate with the API: - [Go Proxy](../user/packages/go_proxy/index.md) - [Maven Repository](../user/packages/maven_repository/index.md#authenticate-with-a-ci-job-token-in-maven) - [NPM Repository](../user/packages/npm_registry/index.md#authenticate-with-a-ci-job-token) - - [Nuget Repository](../user/packages/nuget_repository/index.md) + - [NuGet Repository](../user/packages/nuget_repository/index.md) - [PyPI Repository](../user/packages/pypi_repository/index.md#authenticate-with-a-ci-job-token) - [Generic packages](../user/packages/generic_packages/index.md#publish-a-generic-package-by-using-cicd) - [Get job artifacts](job_artifacts.md#get-job-artifacts) diff --git a/doc/api/events.md b/doc/api/events.md index d073e7ed633..e2ff779f3bf 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -25,7 +25,7 @@ Available action types for the `action` parameter are: - `destroyed` - `expired` -Note that these options are downcased. +Note that these options are in lower case. 
### Target Types @@ -39,7 +39,7 @@ Available target types for the `target_type` parameter are: - `snippet` - `user` -Note that these options are downcased. +Note that these options are in lower case. ### Date formatting diff --git a/doc/api/features.md b/doc/api/features.md index 0ed0dec1b6d..cb3ee04d076 100644 --- a/doc/api/features.md +++ b/doc/api/features.md @@ -128,7 +128,7 @@ POST /features/:name | `user` | string | no | A GitLab username | | `group` | string | no | A GitLab group's path, for example `gitlab-org` | | `project` | string | no | A projects path, for example `gitlab-org/gitlab-foss` | -| `force` | boolean | no | Skip feature flag validation checks, ie. YAML definition | +| `force` | boolean | no | Skip feature flag validation checks, such as a YAML definition | Note that you can enable or disable a feature for a `feature_group`, a `user`, a `group`, and a `project` in a single API call. diff --git a/doc/api/group_badges.md b/doc/api/group_badges.md index 7698fa9ba5f..e3fcaa3db37 100644 --- a/doc/api/group_badges.md +++ b/doc/api/group_badges.md @@ -12,11 +12,15 @@ info: To determine the technical writer assigned to the Stage/Group associated w Badges support placeholders that are replaced in real time in both the link and image URL. The allowed placeholders are: + + - **%{project_path}**: replaced by the project path. - **%{project_id}**: replaced by the project ID. - **%{default_branch}**: replaced by the project default branch. - **%{commit_sha}**: replaced by the last project's commit SHA. + + Because these endpoints aren't inside a project's context, the information used to replace the placeholders comes from the first group's project by creation date. If the group hasn't got any project the original URL with the placeholders is returned. 
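To make the substitution concrete, here is a sketch of adding a group badge whose URLs embed two of the placeholders above; the access token, group ID, and badge image URL are made-up values for the example:

```shell
curl --request POST \
     --header "PRIVATE-TOKEN: <your_access_token>" \
     --data "link_url=https://gitlab.example.com/%{project_path}/-/commits/%{default_branch}&image_url=https://img.shields.io/badge/example-badge-green" \
     "https://gitlab.example.com/api/v4/groups/<group_id>/badges"
```

When the badge is rendered, GitLab replaces `%{project_path}` and `%{default_branch}` with values taken from the group's first project by creation date, as described above.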
diff --git a/doc/api/members.md b/doc/api/members.md index 47b686d9275..87d8fc64bad 100644 --- a/doc/api/members.md +++ b/doc/api/members.md @@ -319,7 +319,7 @@ POST /projects/:id/members | `id` | integer/string | yes | The ID or [URL-encoded path of the project or group](README.md#namespaced-path-encoding) owned by the authenticated user | | `user_id` | integer/string | yes | The user ID of the new member or multiple IDs separated by commas | | `access_level` | integer | yes | A valid access level | -| `expires_at` | string | no | A date string in the format YEAR-MONTH-DAY | +| `expires_at` | string | no | A date string in the format `YEAR-MONTH-DAY` | ```shell curl --request POST --header "PRIVATE-TOKEN: " --data "user_id=1&access_level=30" "https://gitlab.example.com/api/v4/groups/:id/members" @@ -357,7 +357,7 @@ PUT /projects/:id/members/:user_id | `id` | integer/string | yes | The ID or [URL-encoded path of the project or group](README.md#namespaced-path-encoding) owned by the authenticated user | | `user_id` | integer | yes | The user ID of the member | | `access_level` | integer | yes | A valid access level | -| `expires_at` | string | no | A date string in the format YEAR-MONTH-DAY | +| `expires_at` | string | no | A date string in the format `YEAR-MONTH-DAY` | ```shell curl --request PUT --header "PRIVATE-TOKEN: " "https://gitlab.example.com/api/v4/groups/:id/members/:user_id?access_level=40" diff --git a/doc/api/project_templates.md b/doc/api/project_templates.md index d75047d6cb3..6c423f0b058 100644 --- a/doc/api/project_templates.md +++ b/doc/api/project_templates.md @@ -34,7 +34,7 @@ GET /projects/:id/templates/:type | Attribute | Type | Required | Description | | ---------- | ------ | -------- | ----------- | | `id` | integer / string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) | -| `type` | string | yes| The type `(dockerfiles|gitignores|gitlab_ci_ymls|licenses|issues|merge_requests)` of the template | +| `type` | string | yes | The type `(dockerfiles|gitignores|gitlab_ci_ymls|licenses|issues|merge_requests)` of the template | Example response (licenses): diff --git a/doc/api/repositories.md b/doc/api/repositories.md index efdf010e072..6bbd4c56e40 100644 --- a/doc/api/repositories.md +++ b/doc/api/repositories.md @@ -5,7 +5,7 @@ info: "To determine the technical writer assigned to the Stage/Group associated type: reference, api --- -# Repositories API +# Repositories API **(CORE)** ## List repository tree @@ -18,14 +18,15 @@ This command provides essentially the same functionality as the `git ls-tree` co GET /projects/:id/repository/tree ``` -Parameters: +Supported attributes: -- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user -- `path` (optional) - The path inside repository. Used to get content of subdirectories -- `ref` (optional) - The name of a repository branch or tag or if not given the default branch -- `recursive` (optional) - Boolean value used to get a recursive tree (false by default) -- `per_page` (optional) - Number of results to show per page. If not specified, defaults to `20`. - Read more on [pagination](README.md#pagination). +| Attribute | Type | Required | Description | +| :---------- | :------------- | :------- | :---------- | +| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. | +| `path` | string | no | The path inside repository. 
Used to get content of subdirectories. | +| `ref` | string | no | The name of a repository branch or tag or if not given the default branch. | +| `recursive` | boolean | no | Boolean value used to get a recursive tree (false by default). | +| `per_page` | integer | no | Number of results to show per page. If not specified, defaults to `20`. [Learn more on pagination](README.md#pagination). | ```json [ @@ -91,10 +92,12 @@ without authentication if the repository is publicly accessible. GET /projects/:id/repository/blobs/:sha ``` -Parameters: +Supported attributes: -- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user -- `sha` (required) - The blob SHA +| Attribute | Type | Required | Description | +| :-------- | :------------- | :------- | :---------- | +| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. | +| `sha` | string | yes | The blob SHA. | ## Raw blob content @@ -105,10 +108,12 @@ without authentication if the repository is publicly accessible. GET /projects/:id/repository/blobs/:sha/raw ``` -Parameters: +Supported attributes: -- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user -- `sha` (required) - The blob SHA +| Attribute | Type | Required | Description | +| :-------- | :------- | :------- | :---------- | +| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. | +| `sha` | string | yes | The blob SHA. | ## Get file archive @@ -128,10 +133,14 @@ GET /projects/:id/repository/archive[.format] `bz2`, `tar`, and `zip`. For example, specifying `archive.zip` would send an archive in ZIP format. -Parameters: +Supported attributes: -- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user -- `sha` (optional) - The commit SHA to download. A tag, branch reference, or SHA can be used. This defaults to the tip of the default branch if not specified. For example: +| Attribute | Type | Required | Description | +|:------------|:---------------|:---------|:----------------------| +| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. | +| `sha` | string | no | The commit SHA to download. A tag, branch reference, or SHA can be used. This defaults to the tip of the default branch if not specified. | + +Example request: ```shell curl --header "PRIVATE-TOKEN: " "https://gitlab.com/api/v4/projects//repository/archive?sha=" @@ -146,21 +155,22 @@ publicly accessible. Note that diffs could have an empty diff string if [diff li GET /projects/:id/repository/compare ``` -Parameters: +Supported attributes: -- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user -- `from` (required) - the commit SHA or branch name -- `to` (required) - the commit SHA or branch name -- `straight` (optional) - comparison method, `true` for direct comparison between `from` and `to` (`from`..`to`), `false` to compare using merge base (`from`...`to`)'. Default is `false`. 
+| Attribute | Type | Required | Description | +| :--------- | :------------- | :------- | :---------- | +| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. | +| `from` | string | yes | The commit SHA or branch name. | +| `to` | string | yes | The commit SHA or branch name. | +| `straight` | boolean | no | Comparison method, `true` for direct comparison between `from` and `to` (`from`..`to`), `false` to compare using merge base (`from`...`to`)'. Default is `false`. | ```plaintext GET /projects/:id/repository/compare?from=master&to=feature ``` -Response: +Example response: ```json - { "commit": { "id": "12d65c8dd2b2676fa3ac47d955accc085a37a9c1", @@ -203,15 +213,17 @@ GET /projects/:id/repository/contributors ``` WARNING: -The `additions` and `deletions` attributes are deprecated [as of GitLab 13.4](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39653) because they [always return `0`](https://gitlab.com/gitlab-org/gitlab/-/issues/233119). +The `additions` and `deletions` attributes are deprecated [as of GitLab 13.4](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/39653), because they [always return `0`](https://gitlab.com/gitlab-org/gitlab/-/issues/233119). -Parameters: +Supported attributes: -- `id` (required) - The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user -- `order_by` (optional) - Return contributors ordered by `name`, `email`, or `commits` (orders by commit date) fields. Default is `commits` -- `sort` (optional) - Return contributors sorted in `asc` or `desc` order. Default is `asc` +| Attribute | Type | Required | Description | +| :--------- | :------------- | :------- | :---------- | +| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. | +| `order_by` | string | no | Return contributors ordered by `name`, `email`, or `commits` (orders by commit date) fields. Default is `commits`. | +| `sort` | string | no | Return contributors sorted in `asc` or `desc` order. Default is `asc`. | -Response: +Example response: ```json [{ @@ -237,10 +249,12 @@ Get the common ancestor for 2 or more refs (commit SHAs, branch names or tags). GET /projects/:id/repository/merge_base ``` -| Attribute | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) | -| `refs` | array | yes | The refs to find the common ancestor of, multiple refs can be passed | +| Attribute | Type | Required | Description | +| --------- | -------------- | -------- | ------------------------------------------------------------------------------- | +| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) | +| `refs` | array | yes | The refs to find the common ancestor of, multiple refs can be passed | + +Example request: ```shell curl --header "PRIVATE-TOKEN: " "https://gitlab.example.com/api/v4/projects/5/repository/merge_base?refs[]=304d257dcb821665ab5110318fc58a007bd104ed&refs[]=0031876facac3f2b2702a0e53a26e89939a42209" @@ -264,3 +278,252 @@ Example response: "committed_date": "2014-02-27T08:03:18.000Z" } ``` + +## Generate changelog data + +> - [Introduced](https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/351) in GitLab 13.9. 
+> - It's [deployed behind a feature flag](../user/feature_flags.md), disabled by default. +> - It's disabled on GitLab.com. +> - It's not yet recommended for production use. +> - To use it in GitLab self-managed instances, ask a GitLab administrator to [enable it](#enable-or-disable-generating-changelog-data). + +WARNING: +This feature might not be available to you. Check the **version history** note above for details. + +Generate changelog data based on commits in a repository. + +Given a version (using semantic versioning) and a range of commits, +GitLab generates a changelog for all commits that use a particular +[Git trailer](https://git-scm.com/docs/git-interpret-trailers). + +The output of this process is a new section in a changelog file in the Git +repository of the given project. The output format is in Markdown, and can be +customized. + +```plaintext +POST /projects/:id/repository/changelog +``` + +Supported attributes: + +| Attribute | Type | Required | Description | +| :-------- | :------- | :--------- | :---------- | +| `version` | string | yes | The version to generate the changelog for. The format must follow [semantic versioning](https://semver.org/). | +| `from` | string | yes | The start of the range of commits (as a SHA) to use for generating the changelog. This commit itself isn't included in the list. | +| `to` | string | yes | The end of the range of commits (as a SHA) to use for the changelog. This commit _is_ included in the list. | +| `date` | datetime | no | The date and time of the release, defaults to the current time. | +| `branch` | string | no | The branch to commit the changelog changes to, defaults to the project's default branch. | +| `trailer` | string | no | The Git trailer to use for including commits, defaults to `Changelog`. | +| `file` | string | no | The file to commit the changes to, defaults to `CHANGELOG.md`. | +| `message` | string | no | The commit message to produce when committing the changes, defaults to `Add changelog for version X` where X is the value of the `version` argument. | + +### How it works + +Changelogs are generated based on commit titles. Commits are only included if +they contain a specific Git trailer. GitLab uses the value of this trailer to +categorize the changes. + +GitLab uses Git trailers, because Git trailers are +supported by Git out of the box. We use commits as input, as this is the only +source of data every project uses. In addition, commits can be retrieved when +operating on a mirror. This is important for GitLab itself, because during a security +release we might need to include changes from both public projects and private +security mirrors. + +Changelogs are generated by taking the title of the commits to include and using +these as the changelog entries. You can enrich entries with additional data, +such as a link to the merge request or details about the commit author. You can +[customize the format of a changelog](#customize-the-changelog-output) section with a template. + +### Customize the changelog output + +The output is customized using a YAML configuration file stored in your +project's Git repository. This file must reside in +`.gitlab/changelog_config.yml`. + +You can set the following variables in this file: + +- `date_format`: the date format to use in the title of the newly added + changelog data. This uses regular `strftime` formatting. +- `template`: a custom template to use for generating the changelog data. 
+- `categories`: a hash that maps raw category names to the names to use in the + changelog. + +Using the default settings, generating a changelog results in a section along +the lines of the following: + +```markdown +## 1.0.0 (2021-01-05) + +### Features (4 changes) + +- [Feature 1](gitlab-org/gitlab@123abc) by @alice ([merge request](gitlab-org/gitlab!123)) +- [Feature 2](gitlab-org/gitlab@456abc) ([merge request](gitlab-org/gitlab!456)) +- [Feature 3](gitlab-org/gitlab@234abc) by @steve +- [Feature 4](gitlab-org/gitlab@456) +``` + +Each section starts with a title that contains the version and release date. +While the format of the date can be customized, the rest of the title can't be +changed. When adding a new section, GitLab parses these titles to determine +where in the file the new section should be placed. GitLab sorts sections +according to their versions, not their dates. + +Each section can have categories, each with their +corresponding changes. In the above example, "Features" is one such category. +You can customize the format of these sections. + +The section names are derived from the values of the Git trailer used to include +or exclude commits. + +For example, if the trailer to use is called `Changelog`, +and its value is `feature`, then the commit is grouped in the `feature` +category. The names of these raw values might differ from what you want to +show in a changelog, so you can remap them. Let's say we use the `Changelog` +trailer and developers use the following values: `feature`, `bug`, and +`performance`. + +You can remap these using the following YAML configuration file: + +```yaml +--- +categories: + feature: Features + bug: Bug fixes + performance: Performance improvements +``` + +When generating the changelog data, the category titles are then `### Features`, +`### Bug fixes`, and `### Performance improvements`. + +### Custom templates + +The category sections are generated using a template. The default template is as +follows: + +```plaintext +{% if categories %} +{% each categories %} +### {{ title }} ({% if single_change %}1 change{% else %}{{ count }} changes{% end %}) + +{% each entries %} +- [{{ title }}]({{ commit.reference }})\ +{% if author.contributor %} by {{ author.reference }}{% end %}\ +{% if merge_request %} ([merge request]({{ merge_request.reference }})){% end %} +{% end %} + +{% end %} +{% else %} +No changes. +{% end %} +``` + +The `{% ... %}` tags are for statements, and `{{ ... }}` is used for printing +data. Statements must be terminated using a `{% end %}` tag. Both the `if` and +`each` statements require a single argument. + +For example, suppose we have a variable `valid`, and we want to display "yes" +when this value is true, and display "nope" otherwise. We can do so as follows: + +```plaintext +{% if valid %} +yes +{% else %} +nope +{% end %} +``` + +The use of `else` is optional. A value is considered true when it's a non-empty +value or boolean `true`. Empty arrays and hashes are considered false. + +Looping is done using `each`, and variables inside a loop are scoped to it. +Referring to the current value in a loop is done using the variable tag `{{ it +}}`. Other variables read their value from the current loop value. Take +this template for example: + +```plaintext +{% each users %} +{{name}} +{% end %} +``` + +Assuming `users` is an array of objects, each with a `name` field, this would +then print the name of every user. + +Using variable tags, you can access nested objects. 
For example, `{{ +users.0.name }}` prints the name of the first user in the `users` variable. + +If a line ends in a backslash, the next newline is ignored. This allows you to +wrap code across multiple lines, without introducing unnecessary newlines in the +Markdown output. + +You can specify a custom template in your configuration like so: + +```yaml +--- +template: > + {% if categories %} + {% each categories %} + ### {{ title }} + + {% each entries %} + - [{{ title }}]({{ commit.reference }})\ + {% if author.contributor %} by {{ author.reference }}{% end %} + {% end %} + + {% end %} + {% else %} + No changes. + {% end %} +``` + +### Template data + +At the top level, the following variable is available: + +- `categories`: an array of objects, one for every changelog category. + +In a category, the following variables are available: + +- `title`: the title of the category (after it has been remapped). +- `count`: the number of entries in this category. +- `single_change`: a boolean that indicates if there is only one change (`true`), + or multiple changes (`false`). +- `entries`: the entries that belong to this category. + +In an entry, the following variables are available (here `foo.bar` means that +`bar` is a sub-field of `foo`): + +- `title`: the title of the changelog entry (this is the commit title). +- `commit.reference`: a reference to the commit, for example, + `gitlab-org/gitlab@0a4cdd86ab31748ba6dac0f69a8653f206e5cfc7`. +- `commit.trailers`: an object containing all the Git trailers that were present + in the commit body. +- `author.reference`: a reference to the commit author (for example, `@alice`). +- `author.contributor`: a boolean set to `true` when the author is an external + contributor, otherwise this is set to `false`. +- `merge_request.reference`: a reference to the merge request that first + introduced the change (for example, `gitlab-org/gitlab!50063`). + +The `author` and `merge_request` objects might not be present if the data couldn't +be determined (for example, when a commit was created without a corresponding merge +request). + +### Enable or disable generating changelog data **(CORE ONLY)** + +This feature is under development and not ready for production use. It is +deployed behind a feature flag that is **disabled by default**. +[GitLab administrators with access to the GitLab Rails console](../administration/feature_flags.md) +can enable it. + +To enable it for a project: + +```ruby +Feature.enable(:changelog_api, Project.find(id_of_the_project)) +``` + +To disable it for a project: + +```ruby +Feature.disable(:changelog_api, Project.find(id_of_the_project)) +``` diff --git a/doc/api/v3_to_v4.md b/doc/api/v3_to_v4.md index 00e70d34db6..c63a04228a5 100644 --- a/doc/api/v3_to_v4.md +++ b/doc/api/v3_to_v4.md @@ -71,7 +71,7 @@ Below are the changes made between V3 and V4. - Notes do not return deprecated field `upvote` and `downvote` [!9384](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9384) - Return HTTP status code `400` for all validation errors when creating or updating a member instead of sometimes `422` error. [!9523](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9523) - Remove `GET /groups/owned`. 
Use `GET /groups?owned=true` instead [!9505](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9505) -- Return 202 with JSON body on async removals on V4 API (`DELETE /projects/:id/repository/merged_branches` and `DELETE /projects/:id`) [!9449](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9449) +- Return 202 with JSON body on asynchronous removals on V4 API (`DELETE /projects/:id/repository/merged_branches` and `DELETE /projects/:id`) [!9449](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9449) - `GET /projects/:id/milestones?iid[]=x&iid[]=y` array filter has been renamed to `iids` [!9096](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9096) - Return basic information about pipeline in `GET /projects/:id/pipelines` [!8875](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/8875) - Renamed all `build` references to `job` [!9463](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9463) diff --git a/doc/development/utilities.md b/doc/development/utilities.md index dc76f26831f..f0f71842d69 100644 --- a/doc/development/utilities.md +++ b/doc/development/utilities.md @@ -109,6 +109,48 @@ Refer to [`override.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gi Because only a class or prepended module can actually override a method. Including or extending a module into another cannot override anything. +### Interactions with `ActiveSupport::Concern`, `prepend`, and `class_methods` + +When you use `ActiveSupport::Concern` that includes class methods, you do not +get expected results because `ActiveSupport::Concern` doesn't work like a +regular Ruby module. + +Since we already have `Prependable` as a patch for `ActiveSupport::Concern` +to enable `prepend`, it has consequences with how it would interact with +`override` and `class_methods`. We add a workaround directly into +`Prependable` to resolve the problem, by `extend`ing `ClassMethods` into the +defining module. + +This allows us to use `override` to verify `class_methods` used in the +context mentioned above. This workaround only applies when we run the +verification, not when running the application itself. + +Here are example code blocks that demonstrate the effect of this workaround: + +```ruby +module Base + extend ActiveSupport::Concern + + class_methods do + def f + end + end +end + +module Derived + include Base +end + +# Without the workaround +Base.f # => NoMethodError +Derived.f # => nil + +# With the workaround +Base.f # => nil +Derived.f # => nil +``` + ## `StrongMemoize` Refer to [`strong_memoize.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/utils/strong_memoize.rb): diff --git a/doc/user/group/index.md b/doc/user/group/index.md index 96e3f8250f6..491576126b4 100644 --- a/doc/user/group/index.md +++ b/doc/user/group/index.md @@ -555,7 +555,7 @@ username, you can create a new group and transfer projects to it. You can change settings that are specific to repositories in your group. -#### Custom initial branch name **(FREE SELF)** +#### Custom initial branch name **(FREE)** > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43290) in GitLab 13.6. 
diff --git a/lib/api/repositories.rb b/lib/api/repositories.rb index 8af8ffc3b63..353f2ed1c25 100644 --- a/lib/api/repositories.rb +++ b/lib/api/repositories.rb @@ -170,6 +170,67 @@ module API not_found!("Merge Base") end end + + desc 'Generates a changelog section for a release' do + detail 'This feature was introduced in GitLab 13.9' + end + params do + requires :version, + type: String, + regexp: Gitlab::Regex.unbounded_semver_regex, + desc: 'The version of the release, using the semantic versioning format' + + requires :from, + type: String, + desc: 'The first commit in the range of commits to use for the changelog' + + requires :to, + type: String, + desc: 'The last commit in the range of commits to use for the changelog' + + optional :date, + type: DateTime, + desc: 'The date and time of the release' + + optional :branch, + type: String, + desc: 'The branch to commit the changelog changes to' + + optional :trailer, + type: String, + desc: 'The Git trailer to use for determining if commits are to be included in the changelog', + default: ::Repositories::ChangelogService::DEFAULT_TRAILER + + optional :file, + type: String, + desc: 'The file to commit the changelog changes to', + default: ::Repositories::ChangelogService::DEFAULT_FILE + + optional :message, + type: String, + desc: 'The commit message to use when committing the changelog' + end + post ':id/repository/changelog' do + not_found! unless Feature.enabled?(:changelog_api, user_project) + + branch = params[:branch] || user_project.default_branch_or_master + access = Gitlab::UserAccess.new(current_user, container: user_project) + + unless access.can_push_to_branch?(branch) + forbidden!("You are not allowed to commit a changelog on this branch") + end + + service = ::Repositories::ChangelogService.new( + user_project, + current_user, + **declared_params(include_missing: false) + ) + + service.execute + status(200) + rescue => ex + render_api_error!("Failed to generate the changelog: #{ex.message}", 500) + end end end end diff --git a/lib/gitlab/changelog/committer.rb b/lib/gitlab/changelog/committer.rb index d2563590bed..617017faa58 100644 --- a/lib/gitlab/changelog/committer.rb +++ b/lib/gitlab/changelog/committer.rb @@ -26,7 +26,13 @@ module Gitlab # scratch, otherwise we may end up throwing away changes. As such, all # the logic is contained within the retry block. Retriable.retriable(on: CommitError) do - commit = @project.commit(branch) + commit = Gitlab::Git::Commit.last_for_path( + @project.repository, + branch, + file, + literal_pathspec: true + ) + content = blob_content(file, commit) # If the release has already been added (e.g. concurrently by another diff --git a/lib/gitlab/changelog/config.rb b/lib/gitlab/changelog/config.rb index ac62572576e..3f06b612687 100644 --- a/lib/gitlab/changelog/config.rb +++ b/lib/gitlab/changelog/config.rb @@ -37,7 +37,10 @@ module Gitlab end if (template = hash['template']) - config.template = Template::Compiler.new.compile(template) + # We use the full namespace here (and further down) as otherwise Rails + # may use the wrong constant when autoloading is used. 
+ config.template = + ::Gitlab::Changelog::Template::Compiler.new.compile(template) end if (categories = hash['categories']) @@ -54,7 +57,8 @@ module Gitlab def initialize(project) @project = project @date_format = DEFAULT_DATE_FORMAT - @template = Template::Compiler.new.compile(DEFAULT_TEMPLATE) + @template = + ::Gitlab::Changelog::Template::Compiler.new.compile(DEFAULT_TEMPLATE) @categories = {} end diff --git a/lib/gitlab/changelog/template/compiler.rb b/lib/gitlab/changelog/template/compiler.rb index f67bab0f29f..fa7724aa2da 100644 --- a/lib/gitlab/changelog/template/compiler.rb +++ b/lib/gitlab/changelog/template/compiler.rb @@ -98,19 +98,27 @@ module Gitlab ESCAPED_NEWLINE = /\\\n$/.freeze # The start tag for ERB tags. These tags will be escaped, preventing - # users FROM USING erb DIRECTLY. - ERB_START_TAG = '<%' + # users from using ERB directly. + ERB_START_TAG = /<\\?\s*\\?\s*%/.freeze def compile(template) transformed_lines = ['<% it = variables %>'] + # ERB tags must be stripped here, otherwise a user may introduce ERB + # tags by making clever use of whitespace. See + # https://gitlab.com/gitlab-org/gitlab/-/issues/300224 for more + # information. + template = template.gsub(ERB_START_TAG, '<%%') + template.each_line { |line| transformed_lines << transform(line) } - Template.new(transformed_lines.join) + + # We use the full namespace here as otherwise Rails may use the wrong + # constant when autoloading is used. + ::Gitlab::Changelog::Template::Template.new(transformed_lines.join) end def transform(line) line.gsub!(ESCAPED_NEWLINE, '') - line.gsub!(ERB_START_TAG, '<%%') # This replacement ensures that "end" blocks on their own lines # don't add extra newlines. Using an ERB -%> tag sadly swallows too diff --git a/lib/gitlab/database/migration_helpers/v2.rb b/lib/gitlab/database/migration_helpers/v2.rb new file mode 100644 index 00000000000..f20a9b30fa7 --- /dev/null +++ b/lib/gitlab/database/migration_helpers/v2.rb @@ -0,0 +1,219 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module MigrationHelpers + module V2 + include Gitlab::Database::MigrationHelpers + + # Renames a column without requiring downtime. + # + # Concurrent renames work by using database triggers to ensure both the + # old and new column are in sync. However, this method will _not_ remove + # the triggers or the old column automatically; this needs to be done + # manually in a post-deployment migration. This can be done using the + # method `cleanup_concurrent_column_rename`. + # + # table - The name of the database table containing the column. + # old_column - The old column name. + # new_column - The new column name. + # type - The type of the new column. If no type is given the old column's + # type is used. + # batch_column_name - option is for tables without primary key, in this + # case another unique integer column can be used. Example: :user_id + def rename_column_concurrently(table, old_column, new_column, type: nil, batch_column_name: :id) + setup_renamed_column(__callee__, table, old_column, new_column, type, batch_column_name) + + with_lock_retries do + install_bidirectional_triggers(table, old_column, new_column) + end + end + + # Reverses operations performed by rename_column_concurrently. + # + # This method takes care of removing previously installed triggers as well + # as removing the new column. + # + # table - The name of the database table. + # old_column - The name of the old column. + # new_column - The name of the new column. 
+ def undo_rename_column_concurrently(table, old_column, new_column) + teardown_rename_mechanism(table, old_column, new_column, column_to_remove: new_column) + end + + # Cleans up a concurrent column name. + # + # This method takes care of removing previously installed triggers as well + # as removing the old column. + # + # table - The name of the database table. + # old_column - The name of the old column. + # new_column - The name of the new column. + def cleanup_concurrent_column_rename(table, old_column, new_column) + teardown_rename_mechanism(table, old_column, new_column, column_to_remove: old_column) + end + + # Reverses the operations performed by cleanup_concurrent_column_rename. + # + # This method adds back the old_column removed + # by cleanup_concurrent_column_rename. + # It also adds back the triggers that are removed + # by cleanup_concurrent_column_rename. + # + # table - The name of the database table containing the column. + # old_column - The old column name. + # new_column - The new column name. + # type - The type of the old column. If no type is given the new column's + # type is used. + # batch_column_name - option is for tables without primary key, in this + # case another unique integer column can be used. Example: :user_id + # + def undo_cleanup_concurrent_column_rename(table, old_column, new_column, type: nil, batch_column_name: :id) + setup_renamed_column(__callee__, table, new_column, old_column, type, batch_column_name) + + with_lock_retries do + install_bidirectional_triggers(table, old_column, new_column) + end + end + + private + + def setup_renamed_column(calling_operation, table, old_column, new_column, type, batch_column_name) + if transaction_open? + raise "#{calling_operation} can not be run inside a transaction" + end + + column = columns(table).find { |column| column.name == old_column.to_s } + + unless column + raise "Column #{old_column} does not exist on #{table}" + end + + if column.default + raise "#{calling_operation} does not currently support columns with default values" + end + + unless column_exists?(table, batch_column_name) + raise "Column #{batch_column_name} does not exist on #{table}" + end + + check_trigger_permissions!(table) + + unless column_exists?(table, new_column) + create_column_from(table, old_column, new_column, type: type, batch_column_name: batch_column_name) + end + end + + def teardown_rename_mechanism(table, old_column, new_column, column_to_remove:) + return unless column_exists?(table, column_to_remove) + + with_lock_retries do + check_trigger_permissions!(table) + + remove_bidirectional_triggers(table, old_column, new_column) + + remove_column(table, column_to_remove) + end + end + + def install_bidirectional_triggers(table, old_column, new_column) + insert_trigger_name, update_old_trigger_name, update_new_trigger_name = + bidirectional_trigger_names(table, old_column, new_column) + + quoted_table = quote_table_name(table) + quoted_old = quote_column_name(old_column) + quoted_new = quote_column_name(new_column) + + create_insert_trigger(insert_trigger_name, quoted_table, quoted_old, quoted_new) + create_update_trigger(update_old_trigger_name, quoted_table, quoted_new, quoted_old) + create_update_trigger(update_new_trigger_name, quoted_table, quoted_old, quoted_new) + end + + def remove_bidirectional_triggers(table, old_column, new_column) + insert_trigger_name, update_old_trigger_name, update_new_trigger_name = + bidirectional_trigger_names(table, old_column, new_column) + + quoted_table = quote_table_name(table) + 
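+ # Drop the insert trigger and both update triggers, along with the functions that back them (see drop_trigger below).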
+ drop_trigger(insert_trigger_name, quoted_table) + drop_trigger(update_old_trigger_name, quoted_table) + drop_trigger(update_new_trigger_name, quoted_table) + end + + def bidirectional_trigger_names(table, old_column, new_column) + %w[insert update_old update_new].map do |operation| + 'trigger_' + Digest::SHA256.hexdigest("#{table}_#{old_column}_#{new_column}_#{operation}").first(12) + end + end + + def function_name_for_trigger(trigger_name) + "function_for_#{trigger_name}" + end + + def create_insert_trigger(trigger_name, quoted_table, quoted_old_column, quoted_new_column) + function_name = function_name_for_trigger(trigger_name) + + execute(<<~SQL) + CREATE OR REPLACE FUNCTION #{function_name}() + RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + IF NEW.#{quoted_old_column} IS NULL AND NEW.#{quoted_new_column} IS NOT NULL THEN + NEW.#{quoted_old_column} = NEW.#{quoted_new_column}; + END IF; + + IF NEW.#{quoted_new_column} IS NULL AND NEW.#{quoted_old_column} IS NOT NULL THEN + NEW.#{quoted_new_column} = NEW.#{quoted_old_column}; + END IF; + + RETURN NEW; + END + $$; + + DROP TRIGGER IF EXISTS #{trigger_name} + ON #{quoted_table}; + + CREATE TRIGGER #{trigger_name} + BEFORE INSERT ON #{quoted_table} + FOR EACH ROW EXECUTE FUNCTION #{function_name}(); + SQL + end + + def create_update_trigger(trigger_name, quoted_table, quoted_source_column, quoted_target_column) + function_name = function_name_for_trigger(trigger_name) + + execute(<<~SQL) + CREATE OR REPLACE FUNCTION #{function_name}() + RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + NEW.#{quoted_target_column} := NEW.#{quoted_source_column}; + RETURN NEW; + END + $$; + + DROP TRIGGER IF EXISTS #{trigger_name} + ON #{quoted_table}; + + CREATE TRIGGER #{trigger_name} + BEFORE UPDATE OF #{quoted_source_column} ON #{quoted_table} + FOR EACH ROW EXECUTE FUNCTION #{function_name}(); + SQL + end + + def drop_trigger(trigger_name, quoted_table) + function_name = function_name_for_trigger(trigger_name) + + execute(<<~SQL) + DROP TRIGGER IF EXISTS #{trigger_name} + ON #{quoted_table}; + + DROP FUNCTION IF EXISTS #{function_name}; + SQL + end + end + end + end +end diff --git a/lib/gitlab/patch/prependable.rb b/lib/gitlab/patch/prependable.rb index 22ece0a6a8b..dde78cd9178 100644 --- a/lib/gitlab/patch/prependable.rb +++ b/lib/gitlab/patch/prependable.rb @@ -39,9 +39,14 @@ module Gitlab def class_methods super + class_methods_module = const_get(:ClassMethods, false) + if instance_variable_defined?(:@_prepended_class_methods) - const_get(:ClassMethods, false).prepend @_prepended_class_methods + class_methods_module.prepend @_prepended_class_methods end + + # Hack to resolve https://gitlab.com/gitlab-org/gitlab/-/issues/23932 + extend class_methods_module if ENV['STATIC_VERIFICATION'] end def prepended(base = nil, &block) diff --git a/lib/gitlab/utils/override.rb b/lib/gitlab/utils/override.rb index 784a6686962..c92865636d0 100644 --- a/lib/gitlab/utils/override.rb +++ b/lib/gitlab/utils/override.rb @@ -153,7 +153,13 @@ module Gitlab def extended(mod = nil) super - queue_verification(mod.singleton_class) if mod + # Hack to resolve https://gitlab.com/gitlab-org/gitlab/-/issues/23932 + is_not_concern_hack = + (mod.is_a?(Class) || !name&.end_with?('::ClassMethods')) + + if mod && is_not_concern_hack + queue_verification(mod.singleton_class) + end end def queue_verification(base, verify: false) @@ -174,7 +180,7 @@ module Gitlab end def self.verify! - extensions.values.each(&:verify!) + extensions.each_value(&:verify!) 
end end end diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 8b6a60b8003..83ff2b261c6 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -19884,9 +19884,6 @@ msgstr "" msgid "No start date" msgstr "" -msgid "No status" -msgstr "" - msgid "No template" msgstr "" @@ -20804,9 +20801,6 @@ msgstr "" msgid "PackageRegistry|Delete package" msgstr "" -msgid "PackageRegistry|Filter by name" -msgstr "" - msgid "PackageRegistry|For more information on Composer packages in GitLab, %{linkStart}see the documentation.%{linkEnd}" msgstr "" @@ -20900,9 +20894,6 @@ msgstr "" msgid "PackageRegistry|Source project located at %{link}" msgstr "" -msgid "PackageRegistry|There are no %{packageType} packages yet" -msgstr "" - msgid "PackageRegistry|There are no other versions of this package." msgstr "" @@ -20918,6 +20909,9 @@ msgstr "" msgid "PackageRegistry|To widen your search, change or remove the filters above." msgstr "" +msgid "PackageRegistry|Type" +msgstr "" + msgid "PackageRegistry|Unable to fetch package version information." msgstr "" @@ -33393,15 +33387,15 @@ msgstr "" msgid "Your GPG keys (%{count})" msgstr "" +msgid "Your GitLab Ultimate trial will last 30 days after which point you can keep your free GitLab account forever. We just need some additional information to activate your trial." +msgstr "" + msgid "Your GitLab account request has been approved!" msgstr "" msgid "Your GitLab group" msgstr "" -msgid "Your Gitlab Ultimate trial will last 30 days after which point you can keep your free Gitlab account forever. We just need some additional information to activate your trial." -msgstr "" - msgid "Your Groups" msgstr "" diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb index 9e7dfa043c3..8b507acb827 100644 --- a/spec/config/object_store_settings_spec.rb +++ b/spec/config/object_store_settings_spec.rb @@ -49,6 +49,20 @@ RSpec.describe ObjectStoreSettings do } end + shared_examples 'consolidated settings for objects accelerated by Workhorse' do + it 'consolidates active object storage settings' do + described_class::WORKHORSE_ACCELERATED_TYPES.each do |object_type| + # Use to_h to avoid https://gitlab.com/gitlab-org/gitlab/-/issues/286873 + section = subject.try(object_type).to_h + + next unless section.dig('object_store', 'enabled') + + expect(section['object_store']['connection']).to eq(connection) + expect(section['object_store']['consolidated_settings']).to be true + end + end + end + it 'sets correct default values' do subject @@ -77,9 +91,7 @@ RSpec.describe ObjectStoreSettings do expect(settings.pages['object_store']['consolidated_settings']).to be true expect(settings.external_diffs['enabled']).to be false - expect(settings.external_diffs['object_store']['enabled']).to be false - expect(settings.external_diffs['object_store']['remote_directory']).to eq('external_diffs') - expect(settings.external_diffs['object_store']['consolidated_settings']).to be true + expect(settings.external_diffs['object_store']).to be_nil end it 'raises an error when a bucket is missing' do @@ -95,29 +107,49 @@ RSpec.describe ObjectStoreSettings do expect(settings.pages['object_store']).to eq(nil) end - it 'allows pages to define its own connection' do - pages_connection = { 'provider' => 'Google', 'google_application_default' => true } - config['pages'] = { - 'enabled' => true, - 'object_store' => { + context 'GitLab Pages' do + let(:pages_connection) { { 'provider' => 'Google', 'google_application_default' => true } } + + before do + config['pages'] 
= { 'enabled' => true, - 'connection' => pages_connection + 'object_store' => { + 'enabled' => true, + 'connection' => pages_connection + } } - } - - expect { subject }.not_to raise_error - - described_class::WORKHORSE_ACCELERATED_TYPES.each do |object_type| - section = settings.try(object_type) - - next unless section - - expect(section['object_store']['connection']).to eq(connection) - expect(section['object_store']['consolidated_settings']).to be true end - expect(settings.pages['object_store']['connection']).to eq(pages_connection) - expect(settings.pages['object_store']['consolidated_settings']).to be_falsey + it_behaves_like 'consolidated settings for objects accelerated by Workhorse' + + it 'allows pages to define its own connection' do + expect { subject }.not_to raise_error + + expect(settings.pages['object_store']['connection']).to eq(pages_connection) + expect(settings.pages['object_store']['consolidated_settings']).to be_falsey + end + end + + context 'when object storage is selectively disabled for artifacts' do + before do + config['artifacts'] = { + 'enabled' => true, + 'object_store' => { + 'enabled' => false + } + } + end + + it_behaves_like 'consolidated settings for objects accelerated by Workhorse' + + it 'does not enable consolidated settings for artifacts' do + subject + + expect(settings.artifacts['enabled']).to be true + expect(settings.artifacts['object_store']['remote_directory']).to be_nil + expect(settings.artifacts['object_store']['enabled']).to be_falsey + expect(settings.artifacts['object_store']['consolidated_settings']).to be_falsey + end end context 'with legacy config' do diff --git a/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb b/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb new file mode 100644 index 00000000000..4e9d021fa5d --- /dev/null +++ b/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe MergeRequests::OldestPerCommitFinder do + describe '#execute' do + it 'returns a Hash mapping commit SHAs to their oldest merge requests' do + project = create(:project) + mr1 = create(:merge_request, :merged, target_project: project) + mr2 = create(:merge_request, :merged, target_project: project) + mr1_diff = create(:merge_request_diff, merge_request: mr1) + mr2_diff = create(:merge_request_diff, merge_request: mr2) + sha1 = Digest::SHA1.hexdigest('foo') + sha2 = Digest::SHA1.hexdigest('bar') + + create(:merge_request_diff_commit, merge_request_diff: mr1_diff, sha: sha1) + create(:merge_request_diff_commit, merge_request_diff: mr2_diff, sha: sha1) + create( + :merge_request_diff_commit, + merge_request_diff: mr2_diff, + sha: sha2, + relative_order: 1 + ) + + commits = [double(:commit, id: sha1), double(:commit, id: sha2)] + + expect(described_class.new(project).execute(commits)).to eq( + sha1 => mr1, + sha2 => mr2 + ) + end + + it 'skips merge requests that are not merged' do + mr = create(:merge_request) + mr_diff = create(:merge_request_diff, merge_request: mr) + sha = Digest::SHA1.hexdigest('foo') + + create(:merge_request_diff_commit, merge_request_diff: mr_diff, sha: sha) + + commits = [double(:commit, id: sha)] + + expect(described_class.new(mr.target_project).execute(commits)) + .to be_empty + end + end +end diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js index a754c682356..e66e37a4ae6 100644 --- 
a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js +++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js @@ -75,6 +75,8 @@ describe('Edit feature flag form', () => { }); const findAlert = () => wrapper.find(GlAlert); + const findWarningGlAlert = () => + wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'warning'); it('should display the iid', () => { expect(wrapper.find('h3').text()).toContain('^5'); @@ -88,7 +90,7 @@ describe('Edit feature flag form', () => { expect(wrapper.find(GlToggle).props('value')).toBe(true); }); - it('should not alert users that feature flags are changing soon', () => { + it('should alert users the flag is read only', () => { expect(findAlert().text()).toContain('GitLab is moving to a new way of managing feature flags'); }); @@ -96,8 +98,9 @@ describe('Edit feature flag form', () => { it('should render the error', () => { store.dispatch('receiveUpdateFeatureFlagError', { message: ['The name is required'] }); return wrapper.vm.$nextTick(() => { - expect(wrapper.find('.alert-danger').exists()).toEqual(true); - expect(wrapper.find('.alert-danger').text()).toContain('The name is required'); + const warningGlAlert = findWarningGlAlert(); + expect(warningGlAlert.at(1).exists()).toEqual(true); + expect(warningGlAlert.at(1).text()).toContain('The name is required'); }); }); }); diff --git a/spec/frontend/feature_flags/components/new_feature_flag_spec.js b/spec/frontend/feature_flags/components/new_feature_flag_spec.js index e317ac4b092..2dfcdf201fb 100644 --- a/spec/frontend/feature_flags/components/new_feature_flag_spec.js +++ b/spec/frontend/feature_flags/components/new_feature_flag_spec.js @@ -41,6 +41,9 @@ describe('New feature flag form', () => { }); }; + const findWarningGlAlert = () => + wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'warning'); + beforeEach(() => { factory(); }); @@ -53,8 +56,9 @@ describe('New feature flag form', () => { it('should render the error', () => { store.dispatch('receiveCreateFeatureFlagError', { message: ['The name is required'] }); return wrapper.vm.$nextTick(() => { - expect(wrapper.find('.alert').exists()).toEqual(true); - expect(wrapper.find('.alert').text()).toContain('The name is required'); + const warningGlAlert = findWarningGlAlert(); + expect(warningGlAlert.at(0).exists()).toBe(true); + expect(warningGlAlert.at(0).text()).toContain('The name is required'); }); }); }); @@ -81,10 +85,6 @@ describe('New feature flag form', () => { expect(wrapper.find(Form).props('scopes')).toContainEqual(defaultScope); }); - it('should not alert users that feature flags are changing soon', () => { - expect(wrapper.find(GlAlert).exists()).toBe(false); - }); - it('has an all users strategy by default', () => { const strategies = wrapper.find(Form).props('strategies'); diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap deleted file mode 100644 index ed77f25916f..00000000000 --- a/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap +++ /dev/null @@ -1,14 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`packages_filter renders 1`] = ` - -`; diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap index b2df1ac5ab6..3f17731584c 100644 --- 
a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap +++ b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap @@ -6,517 +6,60 @@ exports[`packages_list_app renders 1`] = ` packagehelpurl="foo" />
[Snapshot markup stripped during extraction. Recoverable content of the updated snapshot: an empty state titled "There are no packages yet" with the help text "Learn how to publish and share your packages with GitLab."]
`; diff --git a/spec/frontend/packages/list/components/packages_filter_spec.js b/spec/frontend/packages/list/components/packages_filter_spec.js deleted file mode 100644 index b186b5f5e48..00000000000 --- a/spec/frontend/packages/list/components/packages_filter_spec.js +++ /dev/null @@ -1,50 +0,0 @@ -import Vuex from 'vuex'; -import { GlSearchBoxByClick } from '@gitlab/ui'; -import { createLocalVue, shallowMount } from '@vue/test-utils'; -import PackagesFilter from '~/packages/list/components/packages_filter.vue'; - -const localVue = createLocalVue(); -localVue.use(Vuex); - -describe('packages_filter', () => { - let wrapper; - let store; - - const findGlSearchBox = () => wrapper.find(GlSearchBoxByClick); - - const mountComponent = () => { - store = new Vuex.Store(); - store.dispatch = jest.fn(); - - wrapper = shallowMount(PackagesFilter, { - localVue, - store, - }); - }; - - beforeEach(mountComponent); - - afterEach(() => { - wrapper.destroy(); - wrapper = null; - }); - - it('renders', () => { - expect(wrapper.element).toMatchSnapshot(); - }); - - describe('emits events', () => { - it('sets the filter value in the store on input', () => { - const searchString = 'foo'; - findGlSearchBox().vm.$emit('input', searchString); - - expect(store.dispatch).toHaveBeenCalledWith('setFilter', searchString); - }); - - it('emits the filter event when search box is submitted', () => { - findGlSearchBox().vm.$emit('submit'); - - expect(wrapper.emitted('filter')).toBeTruthy(); - }); - }); -}); diff --git a/spec/frontend/packages/list/components/packages_list_app_spec.js b/spec/frontend/packages/list/components/packages_list_app_spec.js index 217096f822a..36dd73666ea 100644 --- a/spec/frontend/packages/list/components/packages_list_app_spec.js +++ b/spec/frontend/packages/list/components/packages_list_app_spec.js @@ -1,9 +1,10 @@ import Vuex from 'vuex'; import { shallowMount, createLocalVue } from '@vue/test-utils'; -import { GlEmptyState, GlTab, GlTabs, GlSprintf, GlLink } from '@gitlab/ui'; +import { GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui'; import * as commonUtils from '~/lib/utils/common_utils'; import createFlash from '~/flash'; import PackageListApp from '~/packages/list/components/packages_list_app.vue'; +import PackageSearch from '~/packages/list/components/package_search.vue'; import { SHOW_DELETE_SUCCESS_ALERT } from '~/packages/shared/constants'; import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages/list/constants'; @@ -26,9 +27,9 @@ describe('packages_list_app', () => { const emptyListHelpUrl = 'helpUrl'; const findEmptyState = () => wrapper.find(GlEmptyState); const findListComponent = () => wrapper.find(PackageList); - const findTabComponent = (index = 0) => wrapper.findAll(GlTab).at(index); + const findPackageSearch = () => wrapper.find(PackageSearch); - const createStore = (filterQuery = '') => { + const createStore = (filter = []) => { store = new Vuex.Store({ state: { isLoading: false, @@ -38,7 +39,7 @@ describe('packages_list_app', () => { emptyListHelpUrl, packageHelpUrl: 'foo', }, - filterQuery, + filter, }, }); store.dispatch = jest.fn(); @@ -52,8 +53,6 @@ describe('packages_list_app', () => { GlEmptyState, GlLoadingIcon, PackageList, - GlTab, - GlTabs, GlSprintf, GlLink, }, @@ -122,27 +121,9 @@ describe('packages_list_app', () => { expect(store.dispatch).toHaveBeenCalledTimes(1); }); - describe('tab change', () => { - it('calls requestPackagesList when all tab is clicked', () => { - mountComponent(); - - findTabComponent().trigger('click'); - - 
expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList'); - }); - - it('calls requestPackagesList when a package type tab is clicked', () => { - mountComponent(); - - findTabComponent(1).trigger('click'); - - expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList'); - }); - }); - describe('filter without results', () => { beforeEach(() => { - createStore('foo'); + createStore([{ type: 'something' }]); mountComponent(); }); @@ -154,12 +135,28 @@ describe('packages_list_app', () => { }); }); + describe('Package Search', () => { + it('exists', () => { + mountComponent(); + + expect(findPackageSearch().exists()).toBe(true); + }); + + it.each(['sort:changed', 'filter:changed'])('on %p fetches data from the store', (event) => { + mountComponent(); + + findPackageSearch().vm.$emit(event); + + expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList'); + }); + }); + describe('delete alert handling', () => { const { location } = window.location; const search = `?${SHOW_DELETE_SUCCESS_ALERT}=true`; beforeEach(() => { - createStore('foo'); + createStore(); jest.spyOn(commonUtils, 'historyReplaceState').mockImplementation(() => {}); delete window.location; window.location = { diff --git a/spec/frontend/packages/list/components/packages_search_spec.js b/spec/frontend/packages/list/components/packages_search_spec.js new file mode 100644 index 00000000000..23a8d3fb871 --- /dev/null +++ b/spec/frontend/packages/list/components/packages_search_spec.js @@ -0,0 +1,145 @@ +import Vuex from 'vuex'; +import { GlSorting, GlSortingItem, GlFilteredSearch } from '@gitlab/ui'; +import { shallowMount, createLocalVue } from '@vue/test-utils'; +import component from '~/packages/list/components/package_search.vue'; +import PackageTypeToken from '~/packages/list/components/tokens/package_type_token.vue'; + +const localVue = createLocalVue(); +localVue.use(Vuex); + +describe('Package Search', () => { + let wrapper; + let store; + let sorting; + let sortingItems; + + const findPackageListSorting = () => wrapper.find(GlSorting); + const findSortingItems = () => wrapper.findAll(GlSortingItem); + const findFilteredSearch = () => wrapper.find(GlFilteredSearch); + + const createStore = (isGroupPage) => { + const state = { + config: { + isGroupPage, + }, + sorting: { + orderBy: 'version', + sort: 'desc', + }, + filter: [], + }; + store = new Vuex.Store({ + state, + }); + store.dispatch = jest.fn(); + }; + + const mountComponent = (isGroupPage = false) => { + createStore(isGroupPage); + + wrapper = shallowMount(component, { + localVue, + store, + stubs: { + GlSortingItem, + }, + }); + }; + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + }); + + describe('searching', () => { + it('has a filtered-search component', () => { + mountComponent(); + + expect(findFilteredSearch().exists()).toBe(true); + }); + + it('binds the correct props to filtered-search', () => { + mountComponent(); + + expect(findFilteredSearch().props()).toMatchObject({ + value: [], + placeholder: 'Filter results', + availableTokens: wrapper.vm.tokens, + }); + }); + + it('updates vuex when value changes', () => { + mountComponent(); + + findFilteredSearch().vm.$emit('input', ['foo']); + + expect(store.dispatch).toHaveBeenCalledWith('setFilter', ['foo']); + }); + + it('emits filter:changed on submit event', () => { + mountComponent(); + + findFilteredSearch().vm.$emit('submit'); + expect(wrapper.emitted('filter:changed')).toEqual([[]]); + }); + + it('emits filter:changed on clear event and reset vuex', () => { + 
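+ // The clear event should reset the Vuex filter to an empty array and notify the parent via filter:changed.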
mountComponent(); + + findFilteredSearch().vm.$emit('clear'); + + expect(store.dispatch).toHaveBeenCalledWith('setFilter', []); + expect(wrapper.emitted('filter:changed')).toEqual([[]]); + }); + + it('has a PackageTypeToken token', () => { + mountComponent(); + + expect(findFilteredSearch().props('availableTokens')).toEqual( + expect.arrayContaining([ + expect.objectContaining({ token: PackageTypeToken, type: 'type', icon: 'package' }), + ]), + ); + }); + }); + + describe('sorting', () => { + describe('when is in projects', () => { + beforeEach(() => { + mountComponent(); + sorting = findPackageListSorting(); + sortingItems = findSortingItems(); + }); + + it('has all the sortable items', () => { + expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length); + }); + + it('on sort change set sorting in vuex and emit event', () => { + sorting.vm.$emit('sortDirectionChange'); + expect(store.dispatch).toHaveBeenCalledWith('setSorting', { sort: 'asc' }); + expect(wrapper.emitted('sort:changed')).toBeTruthy(); + }); + + it('on sort item click set sorting and emit event', () => { + const item = sortingItems.at(0); + const { orderBy } = wrapper.vm.sortableFields[0]; + item.vm.$emit('click'); + expect(store.dispatch).toHaveBeenCalledWith('setSorting', { orderBy }); + expect(wrapper.emitted('sort:changed')).toBeTruthy(); + }); + }); + + describe('when is in group', () => { + beforeEach(() => { + mountComponent(true); + sorting = findPackageListSorting(); + sortingItems = findSortingItems(); + }); + + it('has all the sortable items', () => { + expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length); + }); + }); + }); +}); diff --git a/spec/frontend/packages/list/components/packages_sort_spec.js b/spec/frontend/packages/list/components/packages_sort_spec.js deleted file mode 100644 index d15ad9bd542..00000000000 --- a/spec/frontend/packages/list/components/packages_sort_spec.js +++ /dev/null @@ -1,90 +0,0 @@ -import Vuex from 'vuex'; -import { GlSorting, GlSortingItem } from '@gitlab/ui'; -import { mount, createLocalVue } from '@vue/test-utils'; -import stubChildren from 'helpers/stub_children'; -import PackagesSort from '~/packages/list/components/packages_sort.vue'; - -const localVue = createLocalVue(); -localVue.use(Vuex); - -describe('packages_sort', () => { - let wrapper; - let store; - let sorting; - let sortingItems; - - const findPackageListSorting = () => wrapper.find(GlSorting); - const findSortingItems = () => wrapper.findAll(GlSortingItem); - - const createStore = (isGroupPage) => { - const state = { - config: { - isGroupPage, - }, - sorting: { - orderBy: 'version', - sort: 'desc', - }, - }; - store = new Vuex.Store({ - state, - }); - store.dispatch = jest.fn(); - }; - - const mountComponent = (isGroupPage = false) => { - createStore(isGroupPage); - - wrapper = mount(PackagesSort, { - localVue, - store, - stubs: { - ...stubChildren(PackagesSort), - GlSortingItem, - }, - }); - }; - - afterEach(() => { - wrapper.destroy(); - wrapper = null; - }); - - describe('when is in projects', () => { - beforeEach(() => { - mountComponent(); - sorting = findPackageListSorting(); - sortingItems = findSortingItems(); - }); - - it('has all the sortable items', () => { - expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length); - }); - - it('on sort change set sorting in vuex and emit event', () => { - sorting.vm.$emit('sortDirectionChange'); - expect(store.dispatch).toHaveBeenCalledWith('setSorting', { sort: 'asc' }); - expect(wrapper.emitted('sort:changed')).toBeTruthy(); - 
}); - - it('on sort item click set sorting and emit event', () => { - const item = sortingItems.at(0); - const { orderBy } = wrapper.vm.sortableFields[0]; - item.vm.$emit('click'); - expect(store.dispatch).toHaveBeenCalledWith('setSorting', { orderBy }); - expect(wrapper.emitted('sort:changed')).toBeTruthy(); - }); - }); - - describe('when is in group', () => { - beforeEach(() => { - mountComponent(true); - sorting = findPackageListSorting(); - sortingItems = findSortingItems(); - }); - - it('has all the sortable items', () => { - expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length); - }); - }); -}); diff --git a/spec/frontend/packages/list/components/tokens/package_type_token_spec.js b/spec/frontend/packages/list/components/tokens/package_type_token_spec.js new file mode 100644 index 00000000000..a654f431266 --- /dev/null +++ b/spec/frontend/packages/list/components/tokens/package_type_token_spec.js @@ -0,0 +1,48 @@ +import { shallowMount } from '@vue/test-utils'; +import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui'; +import component from '~/packages/list/components/tokens/package_type_token.vue'; +import { PACKAGE_TYPES } from '~/packages/list/constants'; + +describe('packages_filter', () => { + let wrapper; + + const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken); + const findFilteredSearchSuggestions = () => wrapper.findAll(GlFilteredSearchSuggestion); + + const mountComponent = ({ attrs, listeners } = {}) => { + wrapper = shallowMount(component, { + attrs, + listeners, + }); + }; + + afterEach(() => { + wrapper.destroy(); + wrapper = null; + }); + + it('it binds all of his attrs to filtered search token', () => { + mountComponent({ attrs: { foo: 'bar' } }); + + expect(findFilteredSearchToken().attributes('foo')).toBe('bar'); + }); + + it('it binds all of his events to filtered search token', () => { + const clickListener = jest.fn(); + mountComponent({ listeners: { click: clickListener } }); + + findFilteredSearchToken().vm.$emit('click'); + + expect(clickListener).toHaveBeenCalled(); + }); + + it.each(PACKAGE_TYPES.map((p, index) => [p, index]))( + 'displays a suggestion for %p', + (packageType, index) => { + mountComponent(); + const item = findFilteredSearchSuggestions().at(index); + expect(item.text()).toBe(packageType.title); + expect(item.props('value')).toBe(packageType.type); + }, + ); +}); diff --git a/spec/frontend/packages/list/stores/actions_spec.js b/spec/frontend/packages/list/stores/actions_spec.js index 05e1fe57cae..d33851e6ab3 100644 --- a/spec/frontend/packages/list/stores/actions_spec.js +++ b/spec/frontend/packages/list/stores/actions_spec.js @@ -30,11 +30,13 @@ describe('Actions Package list store', () => { sort: 'asc', orderBy: 'version', }; + + const filter = []; it('should fetch the project packages list when isGroupPage is false', (done) => { testAction( actions.requestPackagesList, undefined, - { config: { isGroupPage: false, resourceId: 1 }, sorting }, + { config: { isGroupPage: false, resourceId: 1 }, sorting, filter }, [], [ { type: 'setLoading', payload: true }, @@ -54,7 +56,7 @@ describe('Actions Package list store', () => { testAction( actions.requestPackagesList, undefined, - { config: { isGroupPage: true, resourceId: 2 }, sorting }, + { config: { isGroupPage: true, resourceId: 2 }, sorting, filter }, [], [ { type: 'setLoading', payload: true }, @@ -70,7 +72,7 @@ describe('Actions Package list store', () => { ); }); - it('should fetch packages of a certain type when selectedType is 
present', (done) => { + it('should fetch packages of a certain type when a filter with a type is present', (done) => { const packageType = 'maven'; testAction( @@ -79,7 +81,7 @@ describe('Actions Package list store', () => { { config: { isGroupPage: false, resourceId: 1 }, sorting, - selectedType: { type: packageType }, + filter: [{ type: 'type', value: { data: 'maven' } }], }, [], [ @@ -107,7 +109,7 @@ describe('Actions Package list store', () => { testAction( actions.requestPackagesList, undefined, - { config: { isGroupPage: false, resourceId: 2 }, sorting }, + { config: { isGroupPage: false, resourceId: 2 }, sorting, filter }, [], [ { type: 'setLoading', payload: true }, diff --git a/spec/frontend/packages/list/stores/mutations_spec.js b/spec/frontend/packages/list/stores/mutations_spec.js index 0d424a0c011..743de595eb5 100644 --- a/spec/frontend/packages/list/stores/mutations_spec.js +++ b/spec/frontend/packages/list/stores/mutations_spec.js @@ -78,17 +78,10 @@ describe('Mutations Registry Store', () => { }); }); - describe('SET_SELECTED_TYPE', () => { - it('should set the selected type', () => { - mutations[types.SET_SELECTED_TYPE](mockState, { type: 'maven' }); - expect(mockState.selectedType).toEqual({ type: 'maven' }); - }); - }); - describe('SET_FILTER', () => { it('should set the filter query', () => { mutations[types.SET_FILTER](mockState, 'foo'); - expect(mockState.filterQuery).toEqual('foo'); + expect(mockState.filter).toEqual('foo'); }); }); }); diff --git a/spec/frontend/reports/codequality_report/mock_data.js b/spec/frontend/reports/codequality_report/mock_data.js index 9bd61527d3f..c5cecb34509 100644 --- a/spec/frontend/reports/codequality_report/mock_data.js +++ b/spec/frontend/reports/codequality_report/mock_data.js @@ -88,3 +88,53 @@ export const issueDiff = [ urlPath: 'headPath/lib/six.rb#L6', }, ]; + +export const reportIssues = { + status: 'failed', + new_errors: [ + { + description: + 'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). Consider refactoring.', + severity: 'minor', + file_path: 'codequality.rb', + line: 5, + }, + ], + resolved_errors: [ + { + description: 'Insecure Dependency', + severity: 'major', + file_path: 'lib/six.rb', + line: 22, + }, + ], + existing_errors: [], + summary: { total: 3, resolved: 0, errored: 3 }, +}; + +export const parsedReportIssues = { + newIssues: [ + { + description: + 'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). Consider refactoring.', + file_path: 'codequality.rb', + line: 5, + name: + 'Method `long_if` has a Cognitive Complexity of 10 (exceeds 5 allowed). 
Consider refactoring.', + path: 'codequality.rb', + severity: 'minor', + urlPath: 'null/codequality.rb#L5', + }, + ], + resolvedIssues: [ + { + description: 'Insecure Dependency', + file_path: 'lib/six.rb', + line: 22, + name: 'Insecure Dependency', + path: 'lib/six.rb', + severity: 'major', + urlPath: 'null/lib/six.rb#L22', + }, + ], +}; diff --git a/spec/frontend/reports/codequality_report/store/actions_spec.js b/spec/frontend/reports/codequality_report/store/actions_spec.js index 321785cb85a..b70ff92b079 100644 --- a/spec/frontend/reports/codequality_report/store/actions_spec.js +++ b/spec/frontend/reports/codequality_report/store/actions_spec.js @@ -5,7 +5,14 @@ import axios from '~/lib/utils/axios_utils'; import * as actions from '~/reports/codequality_report/store/actions'; import * as types from '~/reports/codequality_report/store/mutation_types'; import createStore from '~/reports/codequality_report/store'; -import { headIssues, baseIssues, mockParsedHeadIssues, mockParsedBaseIssues } from '../mock_data'; +import { + headIssues, + baseIssues, + mockParsedHeadIssues, + mockParsedBaseIssues, + reportIssues, + parsedReportIssues, +} from '../mock_data'; // mock codequality comparison worker jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker', () => @@ -39,6 +46,7 @@ describe('Codequality Reports actions', () => { headPath: 'headPath', baseBlobPath: 'baseBlobPath', headBlobPath: 'headBlobPath', + reportsPath: 'reportsPath', helpPath: 'codequalityHelpPath', }; @@ -55,68 +63,119 @@ describe('Codequality Reports actions', () => { describe('fetchReports', () => { let mock; + let diffFeatureFlagEnabled; - beforeEach(() => { - localState.headPath = `${TEST_HOST}/head.json`; - localState.basePath = `${TEST_HOST}/base.json`; - mock = new MockAdapter(axios); - }); + describe('with codequalityMrDiff feature flag enabled', () => { + beforeEach(() => { + diffFeatureFlagEnabled = true; + localState.reportsPath = `${TEST_HOST}/codequality_reports.json`; + mock = new MockAdapter(axios); + }); - afterEach(() => { - mock.restore(); - }); + afterEach(() => { + mock.restore(); + }); - describe('on success', () => { - it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => { - mock.onGet(`${TEST_HOST}/head.json`).reply(200, headIssues); - mock.onGet(`${TEST_HOST}/base.json`).reply(200, baseIssues); + describe('on success', () => { + it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => { + mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(200, reportIssues); - testAction( - actions.fetchReports, - null, - localState, - [{ type: types.REQUEST_REPORTS }], - [ - { - payload: { - newIssues: [mockParsedHeadIssues[0]], - resolvedIssues: [mockParsedBaseIssues[0]], + testAction( + actions.fetchReports, + diffFeatureFlagEnabled, + localState, + [{ type: types.REQUEST_REPORTS }], + [ + { + payload: parsedReportIssues, + type: 'receiveReportsSuccess', }, - type: 'receiveReportsSuccess', - }, - ], - done, - ); + ], + done, + ); + }); + }); + + describe('on error', () => { + it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => { + mock.onGet(`${TEST_HOST}/codequality_reports.json`).reply(500); + + testAction( + actions.fetchReports, + diffFeatureFlagEnabled, + localState, + [{ type: types.REQUEST_REPORTS }], + [{ type: 'receiveReportsError', payload: expect.any(Error) }], + done, + ); + }); }); }); - describe('on error', () => { - it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => { - 
mock.onGet(`${TEST_HOST}/head.json`).reply(500); - - testAction( - actions.fetchReports, - null, - localState, - [{ type: types.REQUEST_REPORTS }], - [{ type: 'receiveReportsError' }], - done, - ); + describe('with codequalityMrDiff feature flag disabled', () => { + beforeEach(() => { + diffFeatureFlagEnabled = false; + localState.headPath = `${TEST_HOST}/head.json`; + localState.basePath = `${TEST_HOST}/base.json`; + mock = new MockAdapter(axios); }); - }); - describe('with no base path', () => { - it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => { - localState.basePath = null; + afterEach(() => { + mock.restore(); + }); - testAction( - actions.fetchReports, - null, - localState, - [{ type: types.REQUEST_REPORTS }], - [{ type: 'receiveReportsError' }], - done, - ); + describe('on success', () => { + it('commits REQUEST_REPORTS and dispatches receiveReportsSuccess', (done) => { + mock.onGet(`${TEST_HOST}/head.json`).reply(200, headIssues); + mock.onGet(`${TEST_HOST}/base.json`).reply(200, baseIssues); + + testAction( + actions.fetchReports, + diffFeatureFlagEnabled, + localState, + [{ type: types.REQUEST_REPORTS }], + [ + { + payload: { + newIssues: [mockParsedHeadIssues[0]], + resolvedIssues: [mockParsedBaseIssues[0]], + }, + type: 'receiveReportsSuccess', + }, + ], + done, + ); + }); + }); + + describe('on error', () => { + it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => { + mock.onGet(`${TEST_HOST}/head.json`).reply(500); + + testAction( + actions.fetchReports, + diffFeatureFlagEnabled, + localState, + [{ type: types.REQUEST_REPORTS }], + [{ type: 'receiveReportsError', payload: expect.any(Error) }], + done, + ); + }); + }); + + describe('with no base path', () => { + it('commits REQUEST_REPORTS and dispatches receiveReportsError', (done) => { + localState.basePath = null; + + testAction( + actions.fetchReports, + diffFeatureFlagEnabled, + localState, + [{ type: types.REQUEST_REPORTS }], + [{ type: 'receiveReportsError' }], + done, + ); + }); }); }); }); @@ -142,7 +201,7 @@ describe('Codequality Reports actions', () => { actions.receiveReportsError, null, localState, - [{ type: types.RECEIVE_REPORTS_ERROR }], + [{ type: types.RECEIVE_REPORTS_ERROR, payload: null }], [], done, ); diff --git a/spec/frontend/reports/codequality_report/store/mutations_spec.js b/spec/frontend/reports/codequality_report/store/mutations_spec.js index 658abf3088c..f7f9e611ee8 100644 --- a/spec/frontend/reports/codequality_report/store/mutations_spec.js +++ b/spec/frontend/reports/codequality_report/store/mutations_spec.js @@ -55,6 +55,12 @@ describe('Codequality Reports mutations', () => { expect(localState.hasError).toEqual(false); }); + it('clears statusReason', () => { + mutations.RECEIVE_REPORTS_SUCCESS(localState, {}); + + expect(localState.statusReason).toEqual(''); + }); + it('sets newIssues and resolvedIssues from response data', () => { const data = { newIssues: [{ id: 1 }], resolvedIssues: [{ id: 2 }] }; mutations.RECEIVE_REPORTS_SUCCESS(localState, data); @@ -76,5 +82,13 @@ describe('Codequality Reports mutations', () => { expect(localState.hasError).toEqual(true); }); + + it('sets statusReason to string from error response data', () => { + const data = { status_reason: 'This merge request does not have codequality reports' }; + const error = { response: { data } }; + mutations.RECEIVE_REPORTS_ERROR(localState, error); + + expect(localState.statusReason).toEqual(data.status_reason); + }); }); }); diff --git 
a/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js b/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js index 085d697672d..389e9b4a1f6 100644 --- a/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js +++ b/spec/frontend/reports/codequality_report/store/utils/codequality_comparison_spec.js @@ -2,7 +2,13 @@ import { parseCodeclimateMetrics, doCodeClimateComparison, } from '~/reports/codequality_report/store/utils/codequality_comparison'; -import { baseIssues, mockParsedHeadIssues, mockParsedBaseIssues } from '../../mock_data'; +import { + baseIssues, + mockParsedHeadIssues, + mockParsedBaseIssues, + reportIssues, + parsedReportIssues, +} from '../../mock_data'; jest.mock('~/reports/codequality_report/workers/codequality_comparison_worker', () => { let mockPostMessageCallback; @@ -34,7 +40,7 @@ describe('Codequality report store utils', () => { let result; describe('parseCodeclimateMetrics', () => { - it('should parse the received issues', () => { + it('should parse the issues from codeclimate artifacts', () => { [result] = parseCodeclimateMetrics(baseIssues, 'path'); expect(result.name).toEqual(baseIssues[0].check_name); @@ -42,6 +48,14 @@ describe('Codequality report store utils', () => { expect(result.line).toEqual(baseIssues[0].location.lines.begin); }); + it('should parse the issues from backend codequality diff', () => { + [result] = parseCodeclimateMetrics(reportIssues.new_errors, 'path'); + + expect(result.name).toEqual(parsedReportIssues.newIssues[0].name); + expect(result.path).toEqual(parsedReportIssues.newIssues[0].path); + expect(result.line).toEqual(parsedReportIssues.newIssues[0].line); + }); + describe('when an issue has no location or path', () => { const issue = { description: 'Insecure Dependency' }; diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb index 9e18ab34c1f..ac325bd4b29 100644 --- a/spec/helpers/avatars_helper_spec.rb +++ b/spec/helpers/avatars_helper_spec.rb @@ -135,6 +135,15 @@ RSpec.describe AvatarsHelper do helper.avatar_icon_for_user(nil, 20, 2) end end + + context 'for a blocked user' do + let(:user) { create(:user, :blocked) } + + it 'returns the default avatar' do + expect(helper.avatar_icon_for_user(user).to_s) + .to eq(helper.default_avatar) + end + end end describe '#gravatar_icon' do diff --git a/spec/lib/gitlab/changelog/committer_spec.rb b/spec/lib/gitlab/changelog/committer_spec.rb index 71a80264f29..f0d6bc2b6b5 100644 --- a/spec/lib/gitlab/changelog/committer_spec.rb +++ b/spec/lib/gitlab/changelog/committer_spec.rb @@ -86,5 +86,43 @@ RSpec.describe Gitlab::Changelog::Committer do end.not_to raise_error end end + + context "when the changelog changes before saving the changes" do + it 'raises a CommitError' do + release1 = Gitlab::Changelog::Release + .new(version: '1.0.0', date: Time.utc(2020, 1, 1), config: config) + + release2 = Gitlab::Changelog::Release + .new(version: '2.0.0', date: Time.utc(2020, 1, 1), config: config) + + # This creates the initial commit we'll later use to see if the + # changelog changed before saving our changes. 
+ committer.commit( + release: release1, + file: 'CHANGELOG.md', + branch: 'master', + message: 'Initial commit' + ) + + allow(Gitlab::Git::Commit) + .to receive(:last_for_path) + .with( + project.repository, + 'master', + 'CHANGELOG.md', + literal_pathspec: true + ) + .and_return(double(:commit, sha: 'foo')) + + expect do + committer.commit( + release: release2, + file: 'CHANGELOG.md', + branch: 'master', + message: 'Test commit' + ) + end.to raise_error(described_class::CommitError) + end + end end end diff --git a/spec/lib/gitlab/changelog/template/compiler_spec.rb b/spec/lib/gitlab/changelog/template/compiler_spec.rb index d940fbaec89..8b09bc90529 100644 --- a/spec/lib/gitlab/changelog/template/compiler_spec.rb +++ b/spec/lib/gitlab/changelog/template/compiler_spec.rb @@ -125,5 +125,12 @@ RSpec.describe Gitlab::Changelog::Template::Compiler do expect(compile(input)).to eq(input) end + + it 'ignores malicious code that makes use of whitespace' do + input = "x<\\\n%::Kernel.system(\"id\")%>" + + expect(Kernel).not_to receive(:system).with('id') + expect(compile(input)).to eq('x<%::Kernel.system("id")%>') + end end end diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb new file mode 100644 index 00000000000..f132ecbf13b --- /dev/null +++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb @@ -0,0 +1,221 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::MigrationHelpers::V2 do + include Database::TriggerHelpers + + let(:migration) do + ActiveRecord::Migration.new.extend(described_class) + end + + before do + allow(migration).to receive(:puts) + end + + shared_examples_for 'Setting up to rename a column' do + let(:model) { Class.new(ActiveRecord::Base) } + + before do + model.table_name = :test_table + end + + context 'when called inside a transaction block' do + before do + allow(migration).to receive(:transaction_open?).and_return(true) + end + + it 'raises an error' do + expect do + migration.public_send(operation, :test_table, :original, :renamed) + end.to raise_error("#{operation} can not be run inside a transaction") + end + end + + context 'when the existing column has a default value' do + before do + migration.change_column_default :test_table, existing_column, 'default value' + end + + it 'raises an error' do + expect do + migration.public_send(operation, :test_table, :original, :renamed) + end.to raise_error("#{operation} does not currently support columns with default values") + end + end + + context 'when passing a batch column' do + context 'when the batch column does not exist' do + it 'raises an error' do + expect do + migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :missing) + end.to raise_error('Column missing does not exist on test_table') + end + end + + context 'when the batch column does exist' do + it 'passes it when creating the column' do + expect(migration).to receive(:create_column_from) + .with(:test_table, existing_column, added_column, type: nil, batch_column_name: :status) + .and_call_original + + migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :status) + end + end + end + + it 'creates the renamed column, syncing existing data' do + existing_record_1 = model.create!(status: 0, existing_column => 'existing') + existing_record_2 = model.create!(status: 0, existing_column => nil) + + migration.send(operation, :test_table, :original, :renamed) + 
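+      # Reload column information so the model can see the column the helper just added.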
model.reset_column_information + + expect(migration.column_exists?(:test_table, added_column)).to eq(true) + + expect(existing_record_1.reload).to have_attributes(status: 0, original: 'existing', renamed: 'existing') + expect(existing_record_2.reload).to have_attributes(status: 0, original: nil, renamed: nil) + end + + it 'installs triggers to sync new data' do + migration.public_send(operation, :test_table, :original, :renamed) + model.reset_column_information + + new_record_1 = model.create!(status: 1, original: 'first') + new_record_2 = model.create!(status: 1, renamed: 'second') + + expect(new_record_1.reload).to have_attributes(status: 1, original: 'first', renamed: 'first') + expect(new_record_2.reload).to have_attributes(status: 1, original: 'second', renamed: 'second') + + new_record_1.update!(original: 'updated') + new_record_2.update!(renamed: nil) + + expect(new_record_1.reload).to have_attributes(status: 1, original: 'updated', renamed: 'updated') + expect(new_record_2.reload).to have_attributes(status: 1, original: nil, renamed: nil) + end + end + + describe '#rename_column_concurrently' do + before do + allow(migration).to receive(:transaction_open?).and_return(false) + + migration.create_table :test_table do |t| + t.integer :status, null: false + t.text :original + t.text :other_column + end + end + + it_behaves_like 'Setting up to rename a column' do + let(:operation) { :rename_column_concurrently } + let(:existing_column) { :original } + let(:added_column) { :renamed } + end + + context 'when the column to rename does not exist' do + it 'raises an error' do + expect do + migration.rename_column_concurrently :test_table, :missing_column, :renamed + end.to raise_error('Column missing_column does not exist on test_table') + end + end + end + + describe '#undo_cleanup_concurrent_column_rename' do + before do + allow(migration).to receive(:transaction_open?).and_return(false) + + migration.create_table :test_table do |t| + t.integer :status, null: false + t.text :other_column + t.text :renamed + end + end + + it_behaves_like 'Setting up to rename a column' do + let(:operation) { :undo_cleanup_concurrent_column_rename } + let(:existing_column) { :renamed } + let(:added_column) { :original } + end + + context 'when the renamed column does not exist' do + it 'raises an error' do + expect do + migration.undo_cleanup_concurrent_column_rename :test_table, :original, :missing_column + end.to raise_error('Column missing_column does not exist on test_table') + end + end + end + + shared_examples_for 'Cleaning up from renaming a column' do + let(:connection) { migration.connection } + + before do + allow(migration).to receive(:transaction_open?).and_return(false) + + migration.create_table :test_table do |t| + t.integer :status, null: false + t.text :original + t.text :other_column + end + + migration.rename_column_concurrently :test_table, :original, :renamed + end + + context 'when the helper is called repeatedly' do + before do + migration.public_send(operation, :test_table, :original, :renamed) + end + + it 'does not make repeated attempts to cleanup' do + expect(migration).not_to receive(:remove_column) + + expect do + migration.public_send(operation, :test_table, :original, :renamed) + end.not_to raise_error + end + end + + context 'when the renamed column exists' do + let(:triggers) do + [ + ['trigger_7cc71f92fd63', 'function_for_trigger_7cc71f92fd63', before: 'insert'], + ['trigger_f1a1f619636a', 'function_for_trigger_f1a1f619636a', before: 'update'], + ['trigger_769a49938884', 
'function_for_trigger_769a49938884', before: 'update'] + ] + end + + it 'removes the sync triggers and renamed columns' do + triggers.each do |(trigger_name, function_name, event)| + expect_function_to_exist(function_name) + expect_valid_function_trigger(:test_table, trigger_name, function_name, event) + end + + expect(migration.column_exists?(:test_table, added_column)).to eq(true) + + migration.public_send(operation, :test_table, :original, :renamed) + + expect(migration.column_exists?(:test_table, added_column)).to eq(false) + + triggers.each do |(trigger_name, function_name, _)| + expect_trigger_not_to_exist(:test_table, trigger_name) + expect_function_not_to_exist(function_name) + end + end + end + end + + describe '#undo_rename_column_concurrently' do + it_behaves_like 'Cleaning up from renaming a column' do + let(:operation) { :undo_rename_column_concurrently } + let(:added_column) { :renamed } + end + end + + describe '#cleanup_concurrent_column_rename' do + it_behaves_like 'Cleaning up from renaming a column' do + let(:operation) { :cleanup_concurrent_column_rename } + let(:added_column) { :original } + end + end +end diff --git a/spec/lib/gitlab/patch/prependable_spec.rb b/spec/lib/gitlab/patch/prependable_spec.rb index 8feab57a8f3..5b01bb99fc8 100644 --- a/spec/lib/gitlab/patch/prependable_spec.rb +++ b/spec/lib/gitlab/patch/prependable_spec.rb @@ -231,4 +231,22 @@ RSpec.describe Gitlab::Patch::Prependable do .to raise_error(described_class::MultiplePrependedBlocks) end end + + describe 'the extra hack for override verification' do + context 'when ENV["STATIC_VERIFICATION"] is not defined' do + it 'does not extend ClassMethods onto the defining module' do + expect(ee).not_to respond_to(:class_name) + end + end + + context 'when ENV["STATIC_VERIFICATION"] is defined' do + before do + stub_env('STATIC_VERIFICATION', 'true') + end + + it 'does extend ClassMethods onto the defining module' do + expect(ee).to respond_to(:class_name) + end + end + end end diff --git a/spec/lib/gitlab/utils/override_spec.rb b/spec/lib/gitlab/utils/override_spec.rb index 7ba7392df0f..a5e53c1dfc1 100644 --- a/spec/lib/gitlab/utils/override_spec.rb +++ b/spec/lib/gitlab/utils/override_spec.rb @@ -2,6 +2,9 @@ require 'fast_spec_helper' +# Patching ActiveSupport::Concern +require_relative '../../../../config/initializers/0_as_concern' + RSpec.describe Gitlab::Utils::Override do let(:base) do Struct.new(:good) do @@ -164,6 +167,70 @@ RSpec.describe Gitlab::Utils::Override do it_behaves_like 'checking as intended, nothing was overridden' end + + context 'when ActiveSupport::Concern and class_methods are used' do + # We need to give module names before using Override + let(:base) { stub_const('Base', Module.new) } + let(:extension) { stub_const('Extension', Module.new) } + + def define_base(method_name:) + base.module_eval do + extend ActiveSupport::Concern + + class_methods do + define_method(method_name) do + :f + end + end + end + end + + def define_extension(method_name:) + extension.module_eval do + extend ActiveSupport::Concern + + class_methods do + extend Gitlab::Utils::Override + + override method_name + define_method(method_name) do + :g + end + end + end + end + + context 'when it is defining a overriding method' do + before do + define_base(method_name: :f) + define_extension(method_name: :f) + + base.prepend(extension) + end + + it 'verifies' do + expect(base.f).to eq(:g) + + described_class.verify! 
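+          # No error here: the method marked with `override` really does override the base implementation (contrast with the context below, which raises NotImplementedError).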
+ end + end + + context 'when it is not defining a overriding method' do + before do + define_base(method_name: :f) + define_extension(method_name: :g) + + base.prepend(extension) + end + + it 'raises NotImplementedError' do + expect(base.f).to eq(:f) + + expect { described_class.verify! } + .to raise_error(NotImplementedError) + end + end + end end context 'when STATIC_VERIFICATION is not set' do diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb index 45bce8c8a5c..a05f1730708 100644 --- a/spec/requests/api/repositories_spec.rb +++ b/spec/requests/api/repositories_spec.rb @@ -610,4 +610,102 @@ RSpec.describe API::Repositories do end end end + + describe 'POST /projects/:id/repository/changelog' do + context 'when the changelog_api feature flag is enabled' do + it 'generates the changelog for a version' do + spy = instance_spy(Repositories::ChangelogService) + + allow(Repositories::ChangelogService) + .to receive(:new) + .with( + project, + user, + version: '1.0.0', + from: 'foo', + to: 'bar', + date: DateTime.new(2020, 1, 1), + branch: 'kittens', + trailer: 'Foo', + file: 'FOO.md', + message: 'Commit message' + ) + .and_return(spy) + + allow(spy).to receive(:execute) + + post( + api("/projects/#{project.id}/repository/changelog", user), + params: { + version: '1.0.0', + from: 'foo', + to: 'bar', + date: '2020-01-01', + branch: 'kittens', + trailer: 'Foo', + file: 'FOO.md', + message: 'Commit message' + } + ) + + expect(response).to have_gitlab_http_status(:ok) + end + + it 'produces an error when generating the changelog fails' do + spy = instance_spy(Repositories::ChangelogService) + + allow(Repositories::ChangelogService) + .to receive(:new) + .with( + project, + user, + version: '1.0.0', + from: 'foo', + to: 'bar', + date: DateTime.new(2020, 1, 1), + branch: 'kittens', + trailer: 'Foo', + file: 'FOO.md', + message: 'Commit message' + ) + .and_return(spy) + + allow(spy) + .to receive(:execute) + .and_raise(Gitlab::Changelog::Committer::CommitError.new('oops')) + + post( + api("/projects/#{project.id}/repository/changelog", user), + params: { + version: '1.0.0', + from: 'foo', + to: 'bar', + date: '2020-01-01', + branch: 'kittens', + trailer: 'Foo', + file: 'FOO.md', + message: 'Commit message' + } + ) + + expect(response).to have_gitlab_http_status(:internal_server_error) + expect(json_response['message']).to eq('Failed to generate the changelog: oops') + end + end + + context 'when the changelog_api feature flag is disabled' do + before do + stub_feature_flags(changelog_api: false) + end + + it 'responds with a 404 Not Found' do + post( + api("/projects/#{project.id}/repository/changelog", user), + params: { version: '1.0.0', from: 'foo', to: 'bar' } + ) + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end end diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb new file mode 100644 index 00000000000..1ecf45bcd16 --- /dev/null +++ b/spec/services/repositories/changelog_service_spec.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Repositories::ChangelogService do + describe '#execute' do + it 'generates and commits a changelog section' do + project = create(:project, :empty_repo) + creator = project.creator + author1 = create(:user) + author2 = create(:user) + + project.add_maintainer(author1) + project.add_maintainer(author2) + + mr1 = create(:merge_request, :merged, target_project: project) + mr2 = 
create(:merge_request, :merged, target_project: project) + + # The range of commits ignores the first commit, but includes the last + # commit. To ensure both the commits below are included, we must create an + # extra commit. + # + # In the real world, the start commit of the range will be the last commit + # of the previous release, so ignoring that is expected and desired. + sha1 = create_commit( + project, + creator, + commit_message: 'Initial commit', + actions: [{ action: 'create', content: 'test', file_path: 'README.md' }] + ) + + sha2 = create_commit( + project, + author1, + commit_message: "Title 1\n\nChangelog: feature", + actions: [{ action: 'create', content: 'foo', file_path: 'a.txt' }] + ) + + sha3 = create_commit( + project, + author2, + commit_message: "Title 2\n\nChangelog: feature", + actions: [{ action: 'create', content: 'bar', file_path: 'b.txt' }] + ) + + commit1 = project.commit(sha2) + commit2 = project.commit(sha3) + + allow(MergeRequestDiffCommit) + .to receive(:oldest_merge_request_id_per_commit) + .with(project.id, [commit2.id, commit1.id]) + .and_return([ + { sha: sha2, merge_request_id: mr1.id }, + { sha: sha3, merge_request_id: mr2.id } + ]) + + recorder = ActiveRecord::QueryRecorder.new do + described_class + .new(project, creator, version: '1.0.0', from: sha1, to: sha3) + .execute + end + + changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data + + expect(recorder.count).to eq(10) + expect(changelog).to include('Title 1', 'Title 2') + end + end + + def create_commit(project, user, params) + params = { start_branch: 'master', branch_name: 'master' }.merge(params) + Files::MultiService.new(project, user, params).execute.fetch(:result) + end +end
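For reference, a minimal sketch (not taken from the diff itself) of the two pieces these specs exercise: commits are tagged for inclusion with a `Changelog` trailer in their message, and the service is invoked with a version plus a commit range; the project, user and SHA values below are placeholders.

    # A commit message the generator picks up (trailer on its own line):
    #
    #   Add support for generating changelogs
    #
    #   Changelog: feature
    #
    # Generating and committing the corresponding CHANGELOG.md section,
    # mirroring the call made in changelog_service_spec.rb above:
    Repositories::ChangelogService
      .new(project, user, version: '1.0.0', from: start_sha, to: end_sha)
      .execute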