Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
		
							parent
							
								
									d378fdaa60
								
							
						
					
					
						commit
						b119503b70
					
				|  | @ -3,6 +3,7 @@ import axios from '../lib/utils/axios_utils'; | |||
| import { buildApiUrl } from './api_utils'; | ||||
| 
 | ||||
| const PROJECTS_PATH = '/api/:version/projects.json'; | ||||
| const PROJECT_IMPORT_MEMBERS_PATH = '/api/:version/projects/:id/import_project_members/:project_id'; | ||||
| 
 | ||||
| export function getProjects(query, options, callback = () => {}) { | ||||
|   const url = buildApiUrl(PROJECTS_PATH); | ||||
|  | @ -25,3 +26,10 @@ export function getProjects(query, options, callback = () => {}) { | |||
|       return { data, headers }; | ||||
|     }); | ||||
| } | ||||
| 
 | ||||
/**
 * Imports the member list of one project into another.
 *
 * @param {string|number} sourceId - project whose members are copied.
 * @param {string|number} targetId - project that receives the members.
 * @returns {Promise} the axios POST promise.
 */
export function importProjectMembers(sourceId, targetId) {
  // buildApiUrl resolves the ':version' placeholder; the remaining
  // path tokens are filled in afterwards (':id' before ':project_id').
  let url = buildApiUrl(PROJECT_IMPORT_MEMBERS_PATH);
  url = url.replace(':id', sourceId).replace(':project_id', targetId);
  return axios.post(url);
}
|  |  | |||
|  | @ -0,0 +1,157 @@ | |||
| <script> | ||||
| import { GlButton, GlFormGroup, GlModal, GlModalDirective, GlSprintf } from '@gitlab/ui'; | ||||
| import { uniqueId } from 'lodash'; | ||||
| import { importProjectMembers } from '~/api/projects_api'; | ||||
| import { s__, __, sprintf } from '~/locale'; | ||||
| import ProjectSelect from './project_select.vue'; | ||||
| 
 | ||||
export default {
  components: {
    GlButton,
    GlFormGroup,
    GlModal,
    GlSprintf,
    ProjectSelect,
  },
  directives: {
    GlModal: GlModalDirective,
  },
  props: {
    // ID of the project that members are imported INTO.
    projectId: {
      type: String,
      required: true,
    },
    // Display name of the target project, interpolated into the modal intro.
    projectName: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      // Project selected as the import SOURCE; `{}` means nothing selected yet.
      projectToBeImported: {},
      invalidFeedbackMessage: '',
      isLoading: false,
    };
  },
  computed: {
    modalIntro() {
      return sprintf(this.$options.i18n.modalIntro, {
        name: this.projectName,
      });
    },
    // Disable the import button until a source project has been picked.
    importDisabled() {
      return Object.keys(this.projectToBeImported).length === 0;
    },
    validationState() {
      // `null` keeps the form group neutral; `false` renders the invalid state.
      return this.invalidFeedbackMessage === '' ? null : false;
    },
    toastOptions() {
      // Must be a computed property so the arrow function closes over the
      // component instance. (Previously this was declared on `$options`, and
      // `showToastMessage` passed the un-invoked function to `$toast.show`,
      // so `onComplete` never ran — and its `this` would have been wrong.)
      return {
        onComplete: () => {
          this.projectToBeImported = {};
        },
      };
    },
  },
  methods: {
    // POST the import request; toast on success, inline feedback on failure.
    submitImport() {
      this.isLoading = true;
      return importProjectMembers(this.projectId, this.projectToBeImported.id)
        .then(this.showToastMessage)
        .catch(this.showErrorAlert)
        .finally(() => {
          this.isLoading = false;
          this.projectToBeImported = {};
        });
    },
    closeModal() {
      this.invalidFeedbackMessage = '';

      this.$refs.modal.hide();
    },
    showToastMessage() {
      this.$toast.show(this.$options.i18n.successMessage, this.toastOptions);

      this.closeModal();
    },
    showErrorAlert() {
      this.invalidFeedbackMessage = this.$options.i18n.defaultError;
    },
  },
  i18n: {
    buttonText: s__('ImportAProjectModal|Import from a project'),
    projectLabel: __('Project'),
    modalTitle: s__('ImportAProjectModal|Import members from another project'),
    modalIntro: s__(
      "ImportAProjectModal|You're importing members to the %{strongStart}%{name}%{strongEnd} project.",
    ),
    modalHelpText: s__(
      'ImportAProjectModal|Only project members (not group members) are imported, and they get the same permissions as the project you import from.',
    ),
    modalPrimaryButton: s__('ImportAProjectModal|Import project members'),
    modalCancelButton: __('Cancel'),
    defaultError: s__('ImportAProjectModal|Unable to import project members'),
    successMessage: s__('ImportAProjectModal|Successfully imported'),
  },
  projectSelectLabelId: 'project-select',
  modalId: uniqueId('import-a-project-modal-'),
  formClasses: 'gl-mt-3 gl-sm-w-auto gl-w-full',
  buttonClasses: 'gl-w-full',
};
| </script> | ||||
| 
 | ||||
| <template> | ||||
|   <form :class="$options.formClasses"> | ||||
|     <gl-button v-gl-modal="$options.modalId" :class="$options.buttonClasses" variant="default">{{ | ||||
|       $options.i18n.buttonText | ||||
|     }}</gl-button> | ||||
| 
 | ||||
|     <gl-modal | ||||
|       ref="modal" | ||||
|       :modal-id="$options.modalId" | ||||
|       size="sm" | ||||
|       :title="$options.i18n.modalTitle" | ||||
|       ok-variant="danger" | ||||
|       footer-class="gl-bg-gray-10 gl-p-5" | ||||
|     > | ||||
|       <div> | ||||
|         <p ref="modalIntro"> | ||||
|           <gl-sprintf :message="modalIntro"> | ||||
|             <template #strong="{ content }"> | ||||
|               <strong>{{ content }}</strong> | ||||
|             </template> | ||||
|           </gl-sprintf> | ||||
|         </p> | ||||
|         <gl-form-group | ||||
|           :invalid-feedback="invalidFeedbackMessage" | ||||
|           :state="validationState" | ||||
|           data-testid="form-group" | ||||
|         > | ||||
|           <label :id="$options.projectSelectLabelId" class="col-form-label">{{ | ||||
|             $options.i18n.projectLabel | ||||
|           }}</label> | ||||
|           <project-select v-model="projectToBeImported" /> | ||||
|         </gl-form-group> | ||||
|         <p>{{ $options.i18n.modalHelpText }}</p> | ||||
|       </div> | ||||
|       <template #modal-footer> | ||||
|         <div | ||||
|           class="gl-display-flex gl-flex-direction-row gl-justify-content-end gl-flex-wrap gl-m-0" | ||||
|         > | ||||
|           <gl-button data-testid="cancel-button" @click="closeModal"> | ||||
|             {{ $options.i18n.modalCancelButton }} | ||||
|           </gl-button> | ||||
|           <div class="gl-mr-3"></div> | ||||
|           <gl-button | ||||
|             :disabled="importDisabled" | ||||
|             :loading="isLoading" | ||||
|             variant="success" | ||||
|             data-testid="import-button" | ||||
|             @click="submitImport" | ||||
|             >{{ $options.i18n.modalPrimaryButton }}</gl-button | ||||
|           > | ||||
|         </div> | ||||
|       </template> | ||||
|     </gl-modal> | ||||
|   </form> | ||||
| </template> | ||||
|  | @ -0,0 +1,143 @@ | |||
| <script> | ||||
| import { | ||||
|   GlAvatarLabeled, | ||||
|   GlDropdown, | ||||
|   GlDropdownItem, | ||||
|   GlDropdownText, | ||||
|   GlSearchBoxByType, | ||||
| } from '@gitlab/ui'; | ||||
| import { debounce } from 'lodash'; | ||||
| import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils'; | ||||
| import { s__ } from '~/locale'; | ||||
| import { getProjects } from '~/rest_api'; | ||||
| import { SEARCH_DELAY, GROUP_FILTERS } from '../constants'; | ||||
| 
 | ||||
export default {
  name: 'ProjectSelect',
  components: {
    GlAvatarLabeled,
    GlDropdown,
    GlDropdownItem,
    GlDropdownText,
    GlSearchBoxByType,
  },
  model: {
    // v-model binds to `selectedProject`; the default `input` event is emitted
    // from `selectProject` below.
    prop: 'selectedProject',
  },
  props: {
    // NOTE(review): `groupsFilter` and `parentGroupId` are declared but never
    // read in this component — presumably consumed elsewhere (e.g. an EE
    // override) or kept for API parity; confirm before removing.
    groupsFilter: {
      type: String,
      required: false,
      default: GROUP_FILTERS.ALL,
      validator: (value) => Object.values(GROUP_FILTERS).includes(value),
    },
    parentGroupId: {
      type: Number,
      required: false,
      default: 0,
    },
  },
  data() {
    return {
      isFetching: false,
      projects: [],
      selectedProject: {},
      searchTerm: '',
      errorMessage: '',
    };
  },
  computed: {
    // Dropdown toggle label: selected project's name, or the placeholder text.
    selectedProjectName() {
      return this.selectedProject.name || this.$options.i18n.dropdownText;
    },
    // True once fetching is done and the result list is empty.
    isFetchResultEmpty() {
      return this.projects.length === 0 && !this.isFetching;
    },
  },
  watch: {
    // Re-query (debounced) whenever the user types in the search box.
    searchTerm() {
      this.retrieveProjects();
    },
  },
  mounted() {
    // Populate the dropdown with an initial, unfiltered project list.
    this.retrieveProjects();
  },
  methods: {
    // Debounced fetch. A named classic function (not an arrow) is required so
    // `this` resolves to the component instance when Vue invokes the method.
    retrieveProjects: debounce(function debouncedRetrieveProjects() {
      this.isFetching = true;
      this.errorMessage = '';
      return this.fetchProjects()
        .then((response) => {
          // Use the namespaced name so identically-named projects in different
          // namespaces are distinguishable in the list.
          this.projects = response.data.map((project) => ({
            ...convertObjectPropsToCamelCase(project),
            name: project.name_with_namespace,
          }));
        })
        .catch(() => {
          this.errorMessage = this.$options.i18n.errorFetchingProjects;
        })
        .finally(() => {
          this.isFetching = false;
        });
    }, SEARCH_DELAY),
    fetchProjects() {
      return getProjects(this.searchTerm, this.$options.defaultFetchOptions);
    },
    selectProject(project) {
      this.selectedProject = project;

      // Emit for v-model consumers (see `model` option above).
      this.$emit('input', this.selectedProject);
    },
  },
  i18n: {
    dropdownText: s__('ProjectSelect|Select a project'),
    searchPlaceholder: s__('ProjectSelect|Search projects'),
    emptySearchResult: s__('ProjectSelect|No matching results'),
    errorFetchingProjects: s__(
      'ProjectSelect|There was an error fetching the projects. Please try again.',
    ),
  },
  defaultFetchOptions: {
    exclude_internal: true,
    active: true,
  },
};
| </script> | ||||
| <template> | ||||
|   <div> | ||||
|     <gl-dropdown | ||||
|       data-testid="project-select-dropdown" | ||||
|       :text="selectedProjectName" | ||||
|       toggle-class="gl-mb-2" | ||||
|       block | ||||
|       menu-class="gl-w-full!" | ||||
|     > | ||||
|       <gl-search-box-by-type | ||||
|         v-model="searchTerm" | ||||
|         :is-loading="isFetching" | ||||
|         :placeholder="$options.i18n.searchPlaceholder" | ||||
|         data-qa-selector="project_select_dropdown_search_field" | ||||
|       /> | ||||
|       <gl-dropdown-item | ||||
|         v-for="project in projects" | ||||
|         :key="project.id" | ||||
|         :name="project.name" | ||||
|         @click="selectProject(project)" | ||||
|       > | ||||
|         <gl-avatar-labeled | ||||
|           :label="project.name" | ||||
|           :src="project.avatarUrl" | ||||
|           :entity-id="project.id" | ||||
|           :entity-name="project.name" | ||||
|           :size="32" | ||||
|         /> | ||||
|       </gl-dropdown-item> | ||||
|       <gl-dropdown-text v-if="errorMessage" data-testid="error-message"> | ||||
|         <span class="gl-text-gray-500">{{ errorMessage }}</span> | ||||
|       </gl-dropdown-text> | ||||
|       <gl-dropdown-text v-else-if="isFetchResultEmpty" data-testid="empty-result-message"> | ||||
|         <span class="gl-text-gray-500">{{ $options.i18n.emptySearchResult }}</span> | ||||
|       </gl-dropdown-text> | ||||
|     </gl-dropdown> | ||||
|   </div> | ||||
| </template> | ||||
|  | @ -0,0 +1,23 @@ | |||
| import Vue from 'vue'; | ||||
| import ImportAProjectModal from '~/invite_members/components/import_a_project_modal.vue'; | ||||
| 
 | ||||
/**
 * Mounts the "Import a project" modal onto its page placeholder.
 *
 * @returns {Vue|false} the Vue instance, or `false` when the placeholder
 *   element is not present on the current page.
 */
export default function initImportAProjectModal() {
  const mountEl = document.querySelector('.js-import-a-project-modal');

  // Nothing to do when the page does not render the mount point.
  if (!mountEl) {
    return false;
  }

  const { projectId, projectName } = mountEl.dataset;
  const props = { projectId, projectName };

  return new Vue({
    el: mountEl,
    render(createElement) {
      return createElement(ImportAProjectModal, { props });
    },
  });
}
|  | @ -1,4 +1,5 @@ | |||
| import groupsSelect from '~/groups_select'; | ||||
| import initImportAProjectModal from '~/invite_members/init_import_a_project_modal'; | ||||
| import initInviteGroupTrigger from '~/invite_members/init_invite_group_trigger'; | ||||
| import initInviteMembersForm from '~/invite_members/init_invite_members_form'; | ||||
| import initInviteMembersModal from '~/invite_members/init_invite_members_modal'; | ||||
|  | @ -14,6 +15,7 @@ import UsersSelect from '~/users_select'; | |||
| groupsSelect(); | ||||
| memberExpirationDate(); | ||||
| memberExpirationDate('.js-access-expiration-date-groups'); | ||||
| initImportAProjectModal(); | ||||
| initInviteMembersModal(); | ||||
| initInviteMembersTrigger(); | ||||
| initInviteGroupTrigger(); | ||||
|  |  | |||
|  | @ -0,0 +1,52 @@ | |||
| import { s__ } from '~/locale'; | ||||
| 
 | ||||
// Pipeline trigger sources offered as filtered-search suggestions (CE set).
// `text` is the translated label shown in the dropdown; `value` is the raw
// source identifier sent to the API. This module is resolved via
// `ee_else_ce`, so EE may provide an extended list — TODO confirm against
// the EE counterpart before editing.
export const PIPELINE_SOURCES = [
  {
    text: s__('Pipeline|Source|Push'),
    value: 'push',
  },
  {
    text: s__('Pipeline|Source|Web'),
    value: 'web',
  },
  {
    text: s__('Pipeline|Source|Trigger'),
    value: 'trigger',
  },
  {
    text: s__('Pipeline|Source|Schedule'),
    value: 'schedule',
  },
  {
    text: s__('Pipeline|Source|API'),
    value: 'api',
  },
  {
    text: s__('Pipeline|Source|External'),
    value: 'external',
  },
  {
    text: s__('Pipeline|Source|Pipeline'),
    value: 'pipeline',
  },
  {
    text: s__('Pipeline|Source|Chat'),
    value: 'chat',
  },
  {
    text: s__('Pipeline|Source|Web IDE'),
    value: 'webide',
  },
  {
    text: s__('Pipeline|Source|Merge Request'),
    value: 'merge_request_event',
  },
  {
    text: s__('Pipeline|Source|External Pull Request'),
    value: 'external_pull_request_event',
  },
  {
    text: s__('Pipeline|Source|Parent Pipeline'),
    value: 'parent_pipeline',
  },
];
|  | @ -1,8 +1,9 @@ | |||
| <script> | ||||
| import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui'; | ||||
| import { s__ } from '~/locale'; | ||||
| import { PIPELINE_SOURCES } from 'ee_else_ce/pipelines/components/pipelines_list/tokens/constants'; | ||||
| 
 | ||||
| export default { | ||||
|   PIPELINE_SOURCES, | ||||
|   components: { | ||||
|     GlFilteredSearchToken, | ||||
|     GlFilteredSearchSuggestion, | ||||
|  | @ -18,68 +19,8 @@ export default { | |||
|     }, | ||||
|   }, | ||||
|   computed: { | ||||
|     sources() { | ||||
|       return [ | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Push'), | ||||
|           value: 'push', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Web'), | ||||
|           value: 'web', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Trigger'), | ||||
|           value: 'trigger', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Schedule'), | ||||
|           value: 'schedule', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|API'), | ||||
|           value: 'api', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|External'), | ||||
|           value: 'external', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Pipeline'), | ||||
|           value: 'pipeline', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Chat'), | ||||
|           value: 'chat', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Web IDE'), | ||||
|           value: 'webide', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Merge Request'), | ||||
|           value: 'merge_request_event', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|External Pull Request'), | ||||
|           value: 'external_pull_request_event', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|Parent Pipeline'), | ||||
|           value: 'parent_pipeline', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|On-Demand DAST Scan'), | ||||
|           value: 'ondemand_dast_scan', | ||||
|         }, | ||||
|         { | ||||
|           text: s__('Pipeline|Source|On-Demand DAST Validation'), | ||||
|           value: 'ondemand_dast_validation', | ||||
|         }, | ||||
|       ]; | ||||
|     }, | ||||
|     findActiveSource() { | ||||
|       return this.sources.find((source) => source.value === this.value.data); | ||||
|     activeSource() { | ||||
|       return PIPELINE_SOURCES.find((source) => source.value === this.value.data); | ||||
|     }, | ||||
|   }, | ||||
| }; | ||||
|  | @ -89,13 +30,13 @@ export default { | |||
|   <gl-filtered-search-token v-bind="{ ...$props, ...$attrs }" v-on="$listeners"> | ||||
|     <template #view> | ||||
|       <div class="gl-display-flex gl-align-items-center"> | ||||
|         <span>{{ findActiveSource.text }}</span> | ||||
|         <span>{{ activeSource.text }}</span> | ||||
|       </div> | ||||
|     </template> | ||||
| 
 | ||||
|     <template #suggestions> | ||||
|       <gl-filtered-search-suggestion | ||||
|         v-for="source in sources" | ||||
|         v-for="source in $options.PIPELINE_SOURCES" | ||||
|         :key="source.value" | ||||
|         :value="source.value" | ||||
|       > | ||||
|  |  | |||
|  | @ -9,8 +9,6 @@ module Projects | |||
| 
 | ||||
|       def show | ||||
|         @package = project.packages.find(params[:id]) | ||||
|         @package_files = @package.package_files.recent | ||||
|         @maven_metadatum = @package.maven_metadatum | ||||
|       end | ||||
|     end | ||||
|   end | ||||
|  |  | |||
|  | @ -0,0 +1,58 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
module Mutations
  module CustomerRelations
    module Organizations
      # GraphQL mutation that creates a customer-relations organization
      # inside a group. Authorization is the :admin_organization ability on
      # the resolved group; the mutation is additionally gated behind the
      # :customer_relations feature flag.
      class Create < BaseMutation
        include ResolvesIds
        include Gitlab::Graphql::Authorize::AuthorizeResource

        graphql_name 'CustomerRelationsOrganizationCreate'

        field :organization,
              Types::CustomerRelations::OrganizationType,
              null: true,
              description: 'Organization after the mutation.'

        argument :group_id, ::Types::GlobalIDType[::Group],
                 required: true,
                 description: 'Group for the organization.'

        argument :name,
                 GraphQL::Types::String,
                 required: true,
                 description: 'Name of the organization.'

        argument :default_rate,
                 GraphQL::Types::Float,
                 required: false,
                 description: 'Standard billing rate for the organization.'

        argument :description,
                 GraphQL::Types::String,
                 required: false,
                 description: 'Description or notes for the organization.'

        authorize :admin_organization

        def resolve(args)
          group = authorized_find!(id: args[:group_id])

          # Hard-disable the mutation while :customer_relations is rolled out per group.
          raise Gitlab::Graphql::Errors::ResourceNotAvailable, 'Feature disabled' unless Feature.enabled?(:customer_relations, group)

          result = ::CustomerRelations::Organizations::CreateService.new(group: group, current_user: current_user, params: args).execute

          if result.success?
            { organization: result.payload }
          else
            { errors: result.errors }
          end
        end

        # Used by authorized_find! to resolve the group from its global ID.
        def find_object(id:)
          GitlabSchema.object_from_id(id, expected_type: ::Group)
        end
      end
    end
  end
end
|  | @ -14,7 +14,7 @@ module Types | |||
| 
 | ||||
|       field :name, | ||||
|             GraphQL::Types::String, | ||||
|             null: true, | ||||
|             null: false, | ||||
|             description: 'Name of the organization.' | ||||
| 
 | ||||
|       field :default_rate, | ||||
|  |  | |||
|  | @ -33,6 +33,7 @@ module Types | |||
|     mount_mutation Mutations::Branches::Create, calls_gitaly: true | ||||
|     mount_mutation Mutations::Commits::Create, calls_gitaly: true | ||||
|     mount_mutation Mutations::CustomEmoji::Create, feature_flag: :custom_emoji | ||||
|     mount_mutation Mutations::CustomerRelations::Organizations::Create | ||||
|     mount_mutation Mutations::Discussions::ToggleResolve | ||||
|     mount_mutation Mutations::DependencyProxy::ImageTtlGroupPolicy::Update | ||||
|     mount_mutation Mutations::Environments::CanaryIngress::Update | ||||
|  |  | |||
|  | @ -17,6 +17,8 @@ module DesignManagement | |||
| 
 | ||||
|     # we assume sequential ordering. | ||||
|     scope :ordered, -> { order(version_id: :asc) } | ||||
|     scope :by_design, -> (design) { where(design: design) } | ||||
|     scope :by_event, -> (event) { where(event: event) } | ||||
| 
 | ||||
|     # For each design, only select the most recent action | ||||
|     scope :most_recent, -> do | ||||
|  |  | |||
|  | @ -100,11 +100,13 @@ class EnvironmentStatus | |||
|   def self.build_environments_status(mr, user, pipeline) | ||||
|     return [] unless pipeline | ||||
| 
 | ||||
|     pipeline.environments_in_self_and_descendants.includes(:project).available.map do |environment| | ||||
|       next unless Ability.allowed?(user, :read_environment, environment) | ||||
|     ::Gitlab::Database.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/340781') do | ||||
|       pipeline.environments_in_self_and_descendants.includes(:project).available.map do |environment| | ||||
|         next unless Ability.allowed?(user, :read_environment, environment) | ||||
| 
 | ||||
|       EnvironmentStatus.new(pipeline.project, environment, mr, pipeline.sha) | ||||
|     end.compact | ||||
|         EnvironmentStatus.new(pipeline.project, environment, mr, pipeline.sha) | ||||
|       end.compact | ||||
|     end | ||||
|   end | ||||
|   private_class_method :build_environments_status | ||||
| end | ||||
|  |  | |||
|  | @ -16,6 +16,7 @@ class PagesDeployment < ApplicationRecord | |||
|   scope :migrated_from_legacy_storage, -> { where(file: MIGRATED_FILE_NAME) } | ||||
|   scope :with_files_stored_locally, -> { where(file_store: ::ObjectStorage::Store::LOCAL) } | ||||
|   scope :with_files_stored_remotely, -> { where(file_store: ::ObjectStorage::Store::REMOTE) } | ||||
|   scope :project_id_in, ->(ids) { where(project_id: ids) } | ||||
| 
 | ||||
|   validates :file, presence: true | ||||
|   validates :file_store, presence: true, inclusion: { in: ObjectStorage::SUPPORTED_STORES } | ||||
|  |  | |||
|  | @ -145,6 +145,7 @@ class GroupPolicy < BasePolicy | |||
|     enable :read_prometheus | ||||
|     enable :read_package | ||||
|     enable :read_package_settings | ||||
|     enable :admin_organization | ||||
|   end | ||||
| 
 | ||||
|   rule { maintainer }.policy do | ||||
|  |  | |||
|  | @ -2,12 +2,12 @@ | |||
| 
 | ||||
| # Base class, scoped by container (project or group). | ||||
| # | ||||
| # New or existing services which only require project as a container | ||||
| # should subclass BaseProjectService. | ||||
| # New or existing services which only require a project or group container | ||||
| # should subclass BaseProjectService or BaseGroupService. | ||||
| # | ||||
| # If you require a different but specific, non-polymorphic container (such | ||||
| # as group), consider creating a new subclass such as BaseGroupService, | ||||
| # and update the related comment at the top of the original BaseService. | ||||
| # If you require a different but specific, non-polymorphic container | ||||
| # consider creating a new subclass, and update the related comment at | ||||
| # the top of the original BaseService. | ||||
| class BaseContainerService | ||||
|   include BaseServiceUtility | ||||
| 
 | ||||
|  |  | |||
|  | @ -0,0 +1,12 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| # Base class, scoped by group | ||||
# Base class, scoped by group
class BaseGroupService < ::BaseContainerService # rubocop:disable Gitlab/NamespacedClass
  # The group this service operates on; also passed to the parent class
  # as the generic `container`.
  attr_accessor :group

  def initialize(group:, current_user: nil, params: {})
    super(container: group, current_user: current_user, params: params)

    @group = group
  end
end
|  | @ -10,6 +10,7 @@ | |||
| # | ||||
| # - BaseContainerService for services scoped by container (project or group) | ||||
| # - BaseProjectService for services scoped to projects | ||||
| # - BaseGroupService for services scoped to groups | ||||
| # | ||||
| # or, create a new base class and update this comment. | ||||
| class BaseService | ||||
|  |  | |||
|  | @ -0,0 +1,38 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
module CustomerRelations
  module Organizations
    # Creates a customer-relations organization scoped to the service's group.
    class CreateService < ::BaseGroupService
      # returns the created organization
      def execute
        return error_no_permissions unless allowed?

        # Force the new record into the service's group regardless of caller input.
        params[:group_id] = group.id

        organization = Organization.create(params)

        return error_creating(organization) unless organization.persisted?

        ServiceResponse.success(payload: organization)
      end

      private

      # Whether the current user may administer organizations in this group.
      def allowed?
        current_user&.can?(:admin_organization, group)
      end

      def error(message)
        ServiceResponse.error(message: message)
      end

      def error_no_permissions
        error('You have insufficient permissions to create an organization for this group')
      end

      # Surfaces validation errors from the unsaved record, with a generic fallback.
      def error_creating(organization)
        error(organization&.errors&.full_messages || 'Failed to create organization')
      end
    end
  end
end
|  | @ -17,6 +17,7 @@ module DesignManagement | |||
|       version = delete_designs! | ||||
|       EventCreateService.new.destroy_designs(designs, current_user) | ||||
|       Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_designs_removed_action(author: current_user) | ||||
|       TodosDestroyer::DestroyedDesignsWorker.perform_async(designs.map(&:id)) | ||||
| 
 | ||||
|       success(version: version) | ||||
|     end | ||||
|  |  | |||
|  | @ -9,9 +9,9 @@ module ErrorTracking | |||
|       error = project.error_tracking_errors.report_error( | ||||
|         name: exception['type'], # Example: ActionView::MissingTemplate | ||||
|         description: exception['value'], # Example: Missing template posts/show in... | ||||
|         actor: event['transaction'], # Example: PostsController#show | ||||
|         actor: actor, # Example: PostsController#show | ||||
|         platform: event['platform'], # Example: ruby | ||||
|         timestamp: event['timestamp'] | ||||
|         timestamp: timestamp | ||||
|       ) | ||||
| 
 | ||||
|       # The payload field contains all the data on error including stacktrace in jsonb. | ||||
|  | @ -20,7 +20,7 @@ module ErrorTracking | |||
|         environment: event['environment'], | ||||
|         description: exception['value'], | ||||
|         level: event['level'], | ||||
|         occurred_at: event['timestamp'], | ||||
|         occurred_at: timestamp, | ||||
|         payload: event | ||||
|       ) | ||||
|     end | ||||
|  | @ -34,5 +34,29 @@ module ErrorTracking | |||
|     def exception | ||||
|       event['exception']['values'].first | ||||
|     end | ||||
| 
 | ||||
    # Best-effort identifier of where the error happened
    # (e.g. "PostsController#show").
    def actor
      return event['transaction'] if event['transaction']

      # Some SDK do not have transaction attribute.
      # So we build it by combining function name and module name from
      # the last item in stacktrace.
      # NOTE(review): assumes event['exception']['stacktrace']['frames'] is a
      # non-empty array when 'transaction' is absent — TODO confirm upstream
      # payload validation guarantees this.
      last_line = exception.dig('stacktrace', 'frames').last

      "#{last_line['function']}(#{last_line['module']})"
    end
| 
 | ||||
    # Event timestamp, memoized. Falls back to the current time when the SDK
    # did not send one; normalizes numeric epoch values to a Time object.
    def timestamp
      return @timestamp if @timestamp

      @timestamp = (event['timestamp'] || Time.zone.now)

      # Some SDK send timestamp in numeric format like '1630945472.13'.
      if @timestamp.to_s =~ /\A\d+(\.\d+)?\z/
        @timestamp = Time.zone.at(@timestamp.to_f)
      end

      @timestamp
    end
|   end | ||||
| end | ||||
|  |  | |||
|  | @ -0,0 +1,28 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
module Todos
  module Destroy
    # Service class for deleting todos that belongs to a deleted/archived design.
    class DesignService
      attr_reader :design_ids

      # design_ids - IDs of the designs whose todos should be removed.
      def initialize(design_ids)
        @design_ids = design_ids
      end

      def execute
        # delete_all performs a single bulk SQL delete (no AR callbacks),
        # which is intentional for cleanup work.
        todos.delete_all
      end

      private

      # Todos targeting any of the given designs (matched by target type + id).
      def todos
        Todo.for_target(deleted_designs.select(:design_id)).for_type(DesignManagement::Design)
      end

      # Design actions recording a deletion event for the given designs.
      def deleted_designs
        DesignManagement::Action.by_design(design_ids).by_event(:deletion)
      end
    end
  end
end
|  | @ -2,48 +2,15 @@ | |||
|   "description": "Error tracking event payload", | ||||
|   "type": "object", | ||||
|   "required": [], | ||||
|   "modules": { | ||||
|     "type": "object" | ||||
|   }, | ||||
|   "properties": { | ||||
|     "event_id": { | ||||
|       "type": "string" | ||||
|     }, | ||||
|     "level": { | ||||
|       "type": "string" | ||||
|     }, | ||||
|     "timestamp": { | ||||
|       "type": "string" | ||||
|     }, | ||||
|     "release": { | ||||
|       "type": "string" | ||||
|     }, | ||||
|     "environment": { | ||||
|       "type": "string" | ||||
|     }, | ||||
|     "server_name": { | ||||
|       "type": "string" | ||||
|     }, | ||||
|     "message": { | ||||
|       "type": "string" | ||||
|     }, | ||||
|     "contexts": { | ||||
|       "type": "object" | ||||
|     }, | ||||
|     "platform": { | ||||
|       "type": "string" | ||||
|     }, | ||||
|     "sdk": { | ||||
|       "type": "object", | ||||
|       "required": [], | ||||
|       "properties": { | ||||
|         "name": { | ||||
|           "type": "string" | ||||
|         }, | ||||
|         "version": { | ||||
|           "type": "string" | ||||
|         } | ||||
|       } | ||||
|       "type": "object" | ||||
|     }, | ||||
|     "exception": { | ||||
|       "type": "object", | ||||
|  |  | |||
|  | @ -18,10 +18,7 @@ | |||
|         .col-md-12.col-lg-6 | ||||
|           .gl-display-flex.gl-flex-wrap.gl-justify-content-end | ||||
|             - if can_admin_project_member?(@project) | ||||
|               = link_to _("Import a project"), | ||||
|                 import_project_project_members_path(@project), | ||||
|                 class: "btn btn-default btn-md gl-button gl-mt-3 gl-sm-w-auto gl-w-full", | ||||
|                 title: _("Import members from another project") | ||||
|               .js-import-a-project-modal{ data: { project_id: @project.id, project_name: @project.name } } | ||||
|             - if @project.allowed_to_share_with_group? | ||||
|               .js-invite-group-trigger{ data: { classes: 'gl-mt-3 gl-sm-w-auto gl-w-full gl-sm-ml-3', display_text: _('Invite a group') } } | ||||
|             - if can_admin_project_member?(@project) | ||||
|  |  | |||
|  | @ -1681,6 +1681,15 @@ | |||
|   :weight: 1 | ||||
|   :idempotent: | ||||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_destroyed_designs | ||||
|   :worker_name: TodosDestroyer::DestroyedDesignsWorker | ||||
|   :feature_category: :issue_tracking | ||||
|   :has_external_dependencies: | ||||
|   :urgency: :low | ||||
|   :resource_boundary: :unknown | ||||
|   :weight: 1 | ||||
|   :idempotent: true | ||||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_destroyed_issuable | ||||
|   :worker_name: TodosDestroyer::DestroyedIssuableWorker | ||||
|   :feature_category: :issue_tracking | ||||
|  |  | |||
|  | @ -0,0 +1,18 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| module TodosDestroyer | ||||
|   class DestroyedDesignsWorker | ||||
|     include ApplicationWorker | ||||
| 
 | ||||
|     data_consistency :always | ||||
| 
 | ||||
|     sidekiq_options retry: 3 | ||||
|     include TodosDestroyerQueue | ||||
| 
 | ||||
|     idempotent! | ||||
| 
 | ||||
|     def perform(design_ids) | ||||
|       ::Todos::Destroy::DesignService.new(design_ids).execute | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -0,0 +1,8 @@ | |||
| --- | ||||
| name: customer_relations | ||||
| introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/69472 | ||||
| rollout_issue_url:  | ||||
| milestone: '14.3' | ||||
| type: development | ||||
| group: group::product planning | ||||
| default_enabled: false | ||||
|  | @ -0,0 +1,8 @@ | |||
| --- | ||||
| name: gitaly_backup | ||||
| introduced_by_url: https://gitlab.com/gitlab-org/gitaly/-/merge_requests/3554 | ||||
| rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/333034 | ||||
| milestone: '14.0' | ||||
| type: development | ||||
| group: group::gitaly | ||||
| default_enabled: true | ||||
|  | @ -2,6 +2,10 @@ | |||
| 
 | ||||
| app = Rails.application | ||||
| 
 | ||||
| # Disable Sendfile for Sidekiq Web assets since Workhorse won't | ||||
| # always have access to these files. | ||||
| app.config.middleware.insert_before(Rack::Sendfile, Gitlab::Middleware::SidekiqWebStatic) | ||||
| 
 | ||||
| if app.config.public_file_server.enabled | ||||
|   # The `ActionDispatch::Static` middleware intercepts requests for static files | ||||
|   # by checking if they exist in the `/public` directory. | ||||
|  |  | |||
|  | @ -186,14 +186,12 @@ queue group](extra_sidekiq_processes.md#start-multiple-processes). | |||
| 
 | ||||
| The following table shows the workers that should have their own queue: | ||||
| 
 | ||||
| <!-- markdownlint-disable MD044 --> | ||||
| | Worker name | Queue name | GitLab issue | | ||||
| | --- | --- | --- | | ||||
| | EmailReceiverWorker | `email_receiver` | [gitlab-com/gl-infra/scalability#1263](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1263) | | ||||
| | ServiceDeskEmailReceiverWorker | `service_desk_email_receiver` | [gitlab-com/gl-infra/scalability#1263](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1263) | | ||||
| | ProjectImportScheduleWorker | `project_import_schedule` | [gitlab-org/gitlab#340630](https://gitlab.com/gitlab-org/gitlab/-/issues/340630) | | ||||
| | HashedStorage::MigratorWorker | `hashed_storage:hashed_storage_migrator` | [gitlab-org/gitlab#340629](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) | | ||||
| | HashedStorage::ProjectMigrateWorker | `hashed_storage:hashed_storage_project_migrate` | [gitlab-org/gitlab#340629](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) | | ||||
| | HashedStorage::ProjectRollbackWorker | `hashed_storage:hashed_storage_project_rollback` | [gitlab-org/gitlab#340629](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) | | ||||
| | HashedStorage::RollbackerWorker | `hashed_storage:hashed_storage_rollbacker` | [gitlab-org/gitlab#340629](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) | | ||||
| <!-- markdownlint-disable MD044 --> | ||||
| | `EmailReceiverWorker` | `email_receiver` | [`gitlab-com/gl-infra/scalability#1263`](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1263) | | ||||
| | `ServiceDeskEmailReceiverWorker` | `service_desk_email_receiver` | [`gitlab-com/gl-infra/scalability#1263`](https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/1263) | | ||||
| | `ProjectImportScheduleWorker` | `project_import_schedule` | [`gitlab-org/gitlab#340630`](https://gitlab.com/gitlab-org/gitlab/-/issues/340630) | | ||||
| | `HashedStorage::MigratorWorker` | `hashed_storage:hashed_storage_migrator` | [`gitlab-org/gitlab#340629`](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) | | ||||
| | `HashedStorage::ProjectMigrateWorker` | `hashed_storage:hashed_storage_project_migrate` | [`gitlab-org/gitlab#340629`](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) | | ||||
| | `HashedStorage::ProjectRollbackWorker` | `hashed_storage:hashed_storage_project_rollback` | [`gitlab-org/gitlab#340629`](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) | | ||||
| | `HashedStorage::RollbackerWorker` | `hashed_storage:hashed_storage_rollbacker` | [`gitlab-org/gitlab#340629`](https://gitlab.com/gitlab-org/gitlab/-/issues/340629) | | ||||
|  |  | |||
|  | @ -1407,6 +1407,28 @@ Input type: `CreateTestCaseInput` | |||
| | <a id="mutationcreatetestcaseerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. | | ||||
| | <a id="mutationcreatetestcasetestcase"></a>`testCase` | [`Issue`](#issue) | Test case created. | | ||||
| 
 | ||||
| ### `Mutation.customerRelationsOrganizationCreate` | ||||
| 
 | ||||
| Input type: `CustomerRelationsOrganizationCreateInput` | ||||
| 
 | ||||
| #### Arguments | ||||
| 
 | ||||
| | Name | Type | Description | | ||||
| | ---- | ---- | ----------- | | ||||
| | <a id="mutationcustomerrelationsorganizationcreateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. | | ||||
| | <a id="mutationcustomerrelationsorganizationcreatedefaultrate"></a>`defaultRate` | [`Float`](#float) | Standard billing rate for the organization. | | ||||
| | <a id="mutationcustomerrelationsorganizationcreatedescription"></a>`description` | [`String`](#string) | Description or notes for the organization. | | ||||
| | <a id="mutationcustomerrelationsorganizationcreategroupid"></a>`groupId` | [`GroupID!`](#groupid) | Group for the organization. | | ||||
| | <a id="mutationcustomerrelationsorganizationcreatename"></a>`name` | [`String!`](#string) | Name of the organization. | | ||||
| 
 | ||||
| #### Fields | ||||
| 
 | ||||
| | Name | Type | Description | | ||||
| | ---- | ---- | ----------- | | ||||
| | <a id="mutationcustomerrelationsorganizationcreateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. | | ||||
| | <a id="mutationcustomerrelationsorganizationcreateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. | | ||||
| | <a id="mutationcustomerrelationsorganizationcreateorganization"></a>`organization` | [`CustomerRelationsOrganization`](#customerrelationsorganization) | Organization after the mutation. | | ||||
| 
 | ||||
| ### `Mutation.dastOnDemandScanCreate` | ||||
| 
 | ||||
| Input type: `DastOnDemandScanCreateInput` | ||||
|  | @ -8652,7 +8674,7 @@ A custom emoji uploaded by user. | |||
| | <a id="customerrelationsorganizationdefaultrate"></a>`defaultRate` | [`Float`](#float) | Standard billing rate for the organization. | | ||||
| | <a id="customerrelationsorganizationdescription"></a>`description` | [`String`](#string) | Description or notes for the organization. | | ||||
| | <a id="customerrelationsorganizationid"></a>`id` | [`ID!`](#id) | Internal ID of the organization. | | ||||
| | <a id="customerrelationsorganizationname"></a>`name` | [`String`](#string) | Name of the organization. | | ||||
| | <a id="customerrelationsorganizationname"></a>`name` | [`String!`](#string) | Name of the organization. | | ||||
| | <a id="customerrelationsorganizationupdatedat"></a>`updatedAt` | [`Time!`](#time) | Timestamp the organization was last updated. | | ||||
| 
 | ||||
| ### `DastProfile` | ||||
|  |  | |||
|  | @ -158,6 +158,8 @@ Example response: | |||
| ] | ||||
| ``` | ||||
| 
 | ||||
| To view more than the first 20 runners, use [pagination](index.md#pagination). | ||||
| 
 | ||||
| ## Get runner's details | ||||
| 
 | ||||
| Get details of a runner. | ||||
|  |  | |||
|  | @ -7,7 +7,7 @@ description: Learn how documentation review apps work. | |||
| 
 | ||||
| # Documentation review apps | ||||
| 
 | ||||
| If your merge request contains documentation changes, you can use a review app to preview | ||||
| If you're a GitLab team member and your merge request contains documentation changes, you can use a review app to preview | ||||
| how they would look if they were deployed to the [GitLab Docs site](https://docs.gitlab.com). | ||||
| 
 | ||||
| Review apps are enabled for the following projects: | ||||
|  |  | |||
|  | @ -1480,8 +1480,16 @@ If this happens, examine the following: | |||
| ### `gitaly-backup` for repository backup and restore **(FREE SELF)** | ||||
| 
 | ||||
| > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/333034) in GitLab 14.2. | ||||
| > - [Deployed behind a feature flag](../user/feature_flags.md), enabled by default. | ||||
| > - Recommended for production use. | ||||
| > - For GitLab self-managed instances, GitLab administrators can opt to [disable it](#disable-or-enable-gitaly-backup). | ||||
| 
 | ||||
| There can be | ||||
| [risks when disabling released features](../administration/feature_flags.md#risks-when-disabling-released-features). | ||||
| Refer to this feature's version history for more details. | ||||
| 
 | ||||
| `gitaly-backup` is used by the backup Rake task to create and restore repository backups from Gitaly. | ||||
| `gitaly-backup` replaces the previous backup method that directly calls RPCs on Gitaly from GitLab. | ||||
| 
 | ||||
| The backup Rake task must be able to find this executable. It can be configured in Omnibus GitLab packages: | ||||
| 
 | ||||
|  | @ -1493,3 +1501,22 @@ The backup Rake task must be able to find this executable. It can be configured | |||
| 
 | ||||
| 1. [Reconfigure GitLab](../administration/restart_gitlab.md#omnibus-gitlab-reconfigure) | ||||
|    for the changes to take effect | ||||
| 
 | ||||
| #### Disable or enable `gitaly-backup` | ||||
| 
 | ||||
| `gitaly-backup` is under development but ready for production use. | ||||
| It is deployed behind a feature flag that is **enabled by default**. | ||||
| [GitLab administrators with access to the GitLab Rails console](../administration/feature_flags.md) | ||||
| can opt to disable it. | ||||
| 
 | ||||
| To disable it: | ||||
| 
 | ||||
| ```ruby | ||||
| Feature.disable(:gitaly_backup) | ||||
| ``` | ||||
| 
 | ||||
| To enable it: | ||||
| 
 | ||||
| ```ruby | ||||
| Feature.enable(:gitaly_backup) | ||||
| ``` | ||||
|  |  | |||
|  | @ -429,3 +429,18 @@ query ProjectTerraformStates { | |||
| 
 | ||||
| For those new to the GitLab GraphQL API, read | ||||
| [Getting started with GitLab GraphQL API](../../../api/graphql/getting_started.md). | ||||
| 
 | ||||
| ## Troubleshooting | ||||
| 
 | ||||
| ### Unable to lock Terraform state files in CI jobs for `terraform apply` using a plan created in a previous job | ||||
| 
 | ||||
| When passing `-backend-config=` to `terraform init`, Terraform persists these values inside the plan | ||||
| cache file. This includes the `password` value. | ||||
| 
 | ||||
| As a result, to create a plan and later use the same plan in another CI job, you might get the error | ||||
| `Error: Error acquiring the state lock` errors when using `-backend-config=password=$CI_JOB_TOKEN`. | ||||
| This happens because the value of `$CI_JOB_TOKEN` is only valid for the duration of the current job. | ||||
| 
 | ||||
| As a workaround, use [http backend configuration variables](https://www.terraform.io/docs/language/settings/backends/http.html#configuration-variables) in your CI job, | ||||
| which is what happens behind the scenes when following the | ||||
| [Get started using GitLab CI](#get-started-using-gitlab-ci) instructions. | ||||
|  |  | |||
|  | @ -8,6 +8,8 @@ module API | |||
|     feature_category :error_tracking | ||||
| 
 | ||||
|     content_type :envelope, 'application/x-sentry-envelope' | ||||
|     content_type :json, 'application/json' | ||||
|     content_type :txt, 'text/plain' | ||||
|     default_format :envelope | ||||
| 
 | ||||
|     before do | ||||
|  | @ -33,17 +35,24 @@ module API | |||
|       end | ||||
| 
 | ||||
|       def active_client_key? | ||||
|         begin | ||||
|           public_key = ::ErrorTracking::Collector::SentryAuthParser.parse(request)[:public_key] | ||||
|         rescue StandardError | ||||
|           bad_request!('Failed to parse sentry request') | ||||
|         end | ||||
|         public_key = extract_public_key | ||||
| 
 | ||||
|         find_client_key(public_key) | ||||
|       end | ||||
| 
 | ||||
|       def extract_public_key | ||||
|         # Some SDK send public_key as a param. In this case we don't need to parse headers. | ||||
|         return params[:sentry_key] if params[:sentry_key].present? | ||||
| 
 | ||||
|         begin | ||||
|           ::ErrorTracking::Collector::SentryAuthParser.parse(request)[:public_key] | ||||
|         rescue StandardError | ||||
|           bad_request!('Failed to parse sentry request') | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     desc 'Submit error tracking event to the project' do | ||||
|     desc 'Submit error tracking event to the project as envelope' do | ||||
|       detail 'This feature was introduced in GitLab 14.1.' | ||||
|     end | ||||
|     params do | ||||
|  | @ -89,5 +98,38 @@ module API | |||
|       # it is safe only for submission of new events. | ||||
|       no_content! | ||||
|     end | ||||
| 
 | ||||
|     desc 'Submit error tracking event to the project' do | ||||
|       detail 'This feature was introduced in GitLab 14.1.' | ||||
|     end | ||||
|     params do | ||||
|       requires :id, type: String, desc: 'The ID of a project' | ||||
|     end | ||||
|     post 'error_tracking/collector/api/:id/store' do | ||||
|       # There is a reason why we have such uncommon path. | ||||
|       # We depend on a client side error tracking software which | ||||
|       # modifies URL for its own reasons. | ||||
|       # | ||||
|       # When we give user a URL like this | ||||
|       #   HOST/api/v4/error_tracking/collector/123 | ||||
|       # | ||||
|       # Then error tracking software will convert it like this: | ||||
|       #   HOST/api/v4/error_tracking/collector/api/123/store/ | ||||
| 
 | ||||
|       begin | ||||
|         parsed_body = Gitlab::Json.parse(request.body.read) | ||||
|       rescue StandardError | ||||
|         bad_request!('Failed to parse sentry request') | ||||
|       end | ||||
| 
 | ||||
|       ::ErrorTracking::CollectErrorService | ||||
|         .new(project, nil, event: parsed_body) | ||||
|         .execute | ||||
| 
 | ||||
|       # Collector should never return any information back. | ||||
|       # Because DSN and public key are designed for public use, | ||||
|       # it is safe only for submission of new events. | ||||
|       no_content! | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  |  | |||
|  | @ -57,6 +57,10 @@ module Backup | |||
|       }.merge(Gitlab::GitalyClient.connection_data(repository.storage)).to_json) | ||||
|     end | ||||
| 
 | ||||
|     def parallel_enqueue? | ||||
|       false | ||||
|     end | ||||
| 
 | ||||
|     private | ||||
| 
 | ||||
|     def started? | ||||
|  |  | |||
|  | @ -0,0 +1,132 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| module Backup | ||||
|   # Backup and restores repositories using the gitaly RPC | ||||
|   class GitalyRpcBackup | ||||
|     def initialize(progress) | ||||
|       @progress = progress | ||||
|     end | ||||
| 
 | ||||
|     def start(type) | ||||
|       raise Error, 'already started' if @type | ||||
| 
 | ||||
|       @type = type | ||||
|       case type | ||||
|       when :create | ||||
|         FileUtils.rm_rf(backup_repos_path) | ||||
|         FileUtils.mkdir_p(Gitlab.config.backup.path) | ||||
|         FileUtils.mkdir(backup_repos_path, mode: 0700) | ||||
|       when :restore | ||||
|         # no op | ||||
|       else | ||||
|         raise Error, "unknown backup type: #{type}" | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def wait | ||||
|       @type = nil | ||||
|     end | ||||
| 
 | ||||
|     def enqueue(container, repository_type) | ||||
|       backup_restore = BackupRestore.new( | ||||
|         progress, | ||||
|         repository_type.repository_for(container), | ||||
|         backup_repos_path | ||||
|       ) | ||||
| 
 | ||||
|       case @type | ||||
|       when :create | ||||
|         backup_restore.backup | ||||
|       when :restore | ||||
|         backup_restore.restore(always_create: repository_type.project?) | ||||
|       else | ||||
|         raise Error, 'not started' | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def parallel_enqueue? | ||||
|       true | ||||
|     end | ||||
| 
 | ||||
|     private | ||||
| 
 | ||||
|     attr_reader :progress | ||||
| 
 | ||||
|     def backup_repos_path | ||||
|       @backup_repos_path ||= File.join(Gitlab.config.backup.path, 'repositories') | ||||
|     end | ||||
| 
 | ||||
|     class BackupRestore | ||||
|       attr_accessor :progress, :repository, :backup_repos_path | ||||
| 
 | ||||
|       def initialize(progress, repository, backup_repos_path) | ||||
|         @progress = progress | ||||
|         @repository = repository | ||||
|         @backup_repos_path = backup_repos_path | ||||
|       end | ||||
| 
 | ||||
|       def backup | ||||
|         progress.puts " * #{display_repo_path} ... " | ||||
| 
 | ||||
|         if repository.empty? | ||||
|           progress.puts " * #{display_repo_path} ... " + "[EMPTY] [SKIPPED]".color(:cyan) | ||||
|           return | ||||
|         end | ||||
| 
 | ||||
|         FileUtils.mkdir_p(repository_backup_path) | ||||
| 
 | ||||
|         repository.bundle_to_disk(path_to_bundle) | ||||
|         repository.gitaly_repository_client.backup_custom_hooks(custom_hooks_tar) | ||||
| 
 | ||||
|         progress.puts " * #{display_repo_path} ... " + "[DONE]".color(:green) | ||||
| 
 | ||||
|       rescue StandardError => e | ||||
|         progress.puts "[Failed] backing up #{display_repo_path}".color(:red) | ||||
|         progress.puts "Error #{e}".color(:red) | ||||
|       end | ||||
| 
 | ||||
|       def restore(always_create: false) | ||||
|         progress.puts " * #{display_repo_path} ... " | ||||
| 
 | ||||
|         repository.remove rescue nil | ||||
| 
 | ||||
|         if File.exist?(path_to_bundle) | ||||
|           repository.create_from_bundle(path_to_bundle) | ||||
|           restore_custom_hooks | ||||
|         elsif always_create | ||||
|           repository.create_repository | ||||
|         end | ||||
| 
 | ||||
|         progress.puts " * #{display_repo_path} ... " + "[DONE]".color(:green) | ||||
| 
 | ||||
|       rescue StandardError => e | ||||
|         progress.puts "[Failed] restoring #{display_repo_path}".color(:red) | ||||
|         progress.puts "Error #{e}".color(:red) | ||||
|       end | ||||
| 
 | ||||
|       private | ||||
| 
 | ||||
|       def display_repo_path | ||||
|         "#{repository.full_path} (#{repository.disk_path})" | ||||
|       end | ||||
| 
 | ||||
|       def repository_backup_path | ||||
|         @repository_backup_path ||= File.join(backup_repos_path, repository.disk_path) | ||||
|       end | ||||
| 
 | ||||
|       def path_to_bundle | ||||
|         @path_to_bundle ||= File.join(backup_repos_path, repository.disk_path + '.bundle') | ||||
|       end | ||||
| 
 | ||||
|       def restore_custom_hooks | ||||
|         return unless File.exist?(custom_hooks_tar) | ||||
| 
 | ||||
|         repository.gitaly_repository_client.restore_custom_hooks(custom_hooks_tar) | ||||
|       end | ||||
| 
 | ||||
|       def custom_hooks_tar | ||||
|         File.join(repository_backup_path, "custom_hooks.tar") | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -9,10 +9,36 @@ module Backup | |||
|       @strategy = strategy | ||||
|     end | ||||
| 
 | ||||
|     def dump | ||||
|     def dump(max_concurrency:, max_storage_concurrency:) | ||||
|       strategy.start(:create) | ||||
|       enqueue_consecutive | ||||
| 
 | ||||
|       # gitaly-backup is designed to handle concurrency on its own. So we want | ||||
|       # to avoid entering the buggy concurrency code here when gitaly-backup | ||||
|       # is enabled. | ||||
|       if (max_concurrency <= 1 && max_storage_concurrency <= 1) || !strategy.parallel_enqueue? | ||||
|         return enqueue_consecutive | ||||
|       end | ||||
| 
 | ||||
|       check_valid_storages! | ||||
| 
 | ||||
|       semaphore = Concurrent::Semaphore.new(max_concurrency) | ||||
|       errors = Queue.new | ||||
| 
 | ||||
|       threads = Gitlab.config.repositories.storages.keys.map do |storage| | ||||
|         Thread.new do | ||||
|           Rails.application.executor.wrap do | ||||
|             enqueue_storage(storage, semaphore, max_storage_concurrency: max_storage_concurrency) | ||||
|           rescue StandardError => e | ||||
|             errors << e | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       ActiveSupport::Dependencies.interlock.permit_concurrent_loads do | ||||
|         threads.each(&:join) | ||||
|       end | ||||
| 
 | ||||
|       raise errors.pop unless errors.empty? | ||||
|     ensure | ||||
|       strategy.wait | ||||
|     end | ||||
|  | @ -32,6 +58,18 @@ module Backup | |||
| 
 | ||||
|     attr_reader :progress, :strategy | ||||
| 
 | ||||
|     def check_valid_storages! | ||||
|       repository_storage_klasses.each do |klass| | ||||
|         if klass.excluding_repository_storage(Gitlab.config.repositories.storages.keys).exists? | ||||
|           raise Error, "repositories.storages in gitlab.yml does not include all storages used by #{klass}" | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def repository_storage_klasses | ||||
|       [ProjectRepository, SnippetRepository] | ||||
|     end | ||||
| 
 | ||||
|     def enqueue_consecutive | ||||
|       enqueue_consecutive_projects | ||||
|       enqueue_consecutive_snippets | ||||
|  | @ -47,6 +85,50 @@ module Backup | |||
|       Snippet.find_each(batch_size: 1000) { |snippet| enqueue_snippet(snippet) } | ||||
|     end | ||||
| 
 | ||||
|     def enqueue_storage(storage, semaphore, max_storage_concurrency:) | ||||
|       errors = Queue.new | ||||
|       queue = InterlockSizedQueue.new(1) | ||||
| 
 | ||||
|       threads = Array.new(max_storage_concurrency) do | ||||
|         Thread.new do | ||||
|           Rails.application.executor.wrap do | ||||
|             while container = queue.pop | ||||
|               ActiveSupport::Dependencies.interlock.permit_concurrent_loads do | ||||
|                 semaphore.acquire | ||||
|               end | ||||
| 
 | ||||
|               begin | ||||
|                 enqueue_container(container) | ||||
|               rescue StandardError => e | ||||
|                 errors << e | ||||
|                 break | ||||
|               ensure | ||||
|                 semaphore.release | ||||
|               end | ||||
|             end | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       enqueue_records_for_storage(storage, queue, errors) | ||||
| 
 | ||||
|       raise errors.pop unless errors.empty? | ||||
|     ensure | ||||
|       queue.close | ||||
|       ActiveSupport::Dependencies.interlock.permit_concurrent_loads do | ||||
|         threads.each(&:join) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def enqueue_container(container) | ||||
|       case container | ||||
|       when Project | ||||
|         enqueue_project(container) | ||||
|       when Snippet | ||||
|         enqueue_snippet(container) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def enqueue_project(project) | ||||
|       strategy.enqueue(project, Gitlab::GlRepository::PROJECT) | ||||
|       strategy.enqueue(project, Gitlab::GlRepository::WIKI) | ||||
|  | @ -57,10 +139,32 @@ module Backup | |||
|       strategy.enqueue(snippet, Gitlab::GlRepository::SNIPPET) | ||||
|     end | ||||
| 
 | ||||
|     def enqueue_records_for_storage(storage, queue, errors) | ||||
|       records_to_enqueue(storage).each do |relation| | ||||
|         relation.find_each(batch_size: 100) do |project| | ||||
|           break unless errors.empty? | ||||
| 
 | ||||
|           queue.push(project) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def records_to_enqueue(storage) | ||||
|       [projects_in_storage(storage), snippets_in_storage(storage)] | ||||
|     end | ||||
| 
 | ||||
|     def projects_in_storage(storage) | ||||
|       project_relation.id_in(ProjectRepository.for_repository_storage(storage).select(:project_id)) | ||||
|     end | ||||
| 
 | ||||
|     def project_relation | ||||
|       Project.includes(:route, :group, namespace: :owner) | ||||
|     end | ||||
| 
 | ||||
|     def snippets_in_storage(storage) | ||||
|       Snippet.id_in(SnippetRepository.for_repository_storage(storage).select(:snippet_id)) | ||||
|     end | ||||
| 
 | ||||
|     def restore_object_pools | ||||
|       PoolRepository.includes(:source_project).find_each do |pool| | ||||
|         progress.puts " - Object pool #{pool.disk_path}..." | ||||
|  | @ -95,6 +199,24 @@ module Backup | |||
| 
 | ||||
|       Snippet.id_in(invalid_snippets).delete_all | ||||
|     end | ||||
| 
 | ||||
|     class InterlockSizedQueue < SizedQueue | ||||
|       extend ::Gitlab::Utils::Override | ||||
| 
 | ||||
|       override :pop | ||||
|       def pop(*) | ||||
|         ActiveSupport::Dependencies.interlock.permit_concurrent_loads do | ||||
|           super | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       override :push | ||||
|       def push(*) | ||||
|         ActiveSupport::Dependencies.interlock.permit_concurrent_loads do | ||||
|           super | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
| 
 | ||||
|  |  | |||
|  | @ -0,0 +1,24 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| # This module removes the X-Sendfile-Type header for /admin/sidekiq | ||||
| # assets since Workhorse isn't always guaranteed to have the assets | ||||
| # present on disk, such as when using Cloud Native GitLab | ||||
| # containers. These assets are also small and served infrequently so it | ||||
| # should be fine to do this. | ||||
| module Gitlab | ||||
|   module Middleware | ||||
|     class SidekiqWebStatic | ||||
|       SIDEKIQ_REGEX = %r{\A/admin/sidekiq/}.freeze | ||||
| 
 | ||||
|       def initialize(app) | ||||
|         @app = app | ||||
|       end | ||||
| 
 | ||||
|       def call(env) | ||||
|         env.delete('HTTP_X_SENDFILE_TYPE') if env['PATH_INFO'] =~ SIDEKIQ_REGEX | ||||
| 
 | ||||
|         @app.call(env) | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -82,8 +82,14 @@ module Gitlab | |||
|     end | ||||
| 
 | ||||
|     def self.configure_throttles(rack_attack) | ||||
|       throttle_or_track(rack_attack, 'throttle_unauthenticated', Gitlab::Throttle.unauthenticated_options) do |req| | ||||
|         if req.throttle_unauthenticated? | ||||
|       throttle_or_track(rack_attack, 'throttle_unauthenticated_api', Gitlab::Throttle.unauthenticated_api_options) do |req| | ||||
|         if req.throttle_unauthenticated_api? | ||||
|           req.ip | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       throttle_or_track(rack_attack, 'throttle_unauthenticated_web', Gitlab::Throttle.unauthenticated_web_options) do |req| | ||||
|         if req.throttle_unauthenticated_web? | ||||
|           req.ip | ||||
|         end | ||||
|       end | ||||
|  | @ -177,7 +183,15 @@ module Gitlab | |||
|       return false if dry_run_config.empty? | ||||
|       return true if dry_run_config == '*' | ||||
| 
 | ||||
|       dry_run_config.split(',').map(&:strip).include?(name) | ||||
|       dry_run_throttles = dry_run_config.split(',').map(&:strip) | ||||
| 
 | ||||
|       # `throttle_unauthenticated` was split into API and web, so to maintain backwards-compatibility | ||||
|       # this throttle name now controls both rate limits. | ||||
|       if dry_run_throttles.include?('throttle_unauthenticated') | ||||
|         dry_run_throttles += %w[throttle_unauthenticated_api throttle_unauthenticated_web] | ||||
|       end | ||||
| 
 | ||||
|       dry_run_throttles.include?(name) | ||||
|     end | ||||
| 
 | ||||
|     def self.user_allowlist | ||||
|  |  | |||
|  | @ -60,10 +60,19 @@ module Gitlab | |||
|         path =~ protected_paths_regex | ||||
|       end | ||||
| 
 | ||||
|       def throttle_unauthenticated? | ||||
|       def throttle_unauthenticated_api? | ||||
|         api_request? && | ||||
|         !should_be_skipped? && | ||||
|         !throttle_unauthenticated_packages_api? && | ||||
|         !throttle_unauthenticated_files_api? && | ||||
|         Gitlab::Throttle.settings.throttle_unauthenticated_api_enabled && | ||||
|         unauthenticated? | ||||
|       end | ||||
| 
 | ||||
|       def throttle_unauthenticated_web? | ||||
|         web_request? && | ||||
|         !should_be_skipped? && | ||||
|         # TODO: Column will be renamed in https://gitlab.com/gitlab-org/gitlab/-/issues/340031 | ||||
|         Gitlab::Throttle.settings.throttle_unauthenticated_enabled && | ||||
|         unauthenticated? | ||||
|       end | ||||
|  |  | |||
|  | @ -24,7 +24,14 @@ module Gitlab | |||
|       "HTTP_#{env_value.upcase.tr('-', '_')}" | ||||
|     end | ||||
| 
 | ||||
|     def self.unauthenticated_options | ||||
|     def self.unauthenticated_api_options | ||||
|       limit_proc = proc { |req| settings.throttle_unauthenticated_api_requests_per_period } | ||||
|       period_proc = proc { |req| settings.throttle_unauthenticated_api_period_in_seconds.seconds } | ||||
|       { limit: limit_proc, period: period_proc } | ||||
|     end | ||||
| 
 | ||||
|     def self.unauthenticated_web_options | ||||
|       # TODO: Columns will be renamed in https://gitlab.com/gitlab-org/gitlab/-/issues/340031 | ||||
|       limit_proc = proc { |req| settings.throttle_unauthenticated_requests_per_period } | ||||
|       period_proc = proc { |req| settings.throttle_unauthenticated_period_in_seconds.seconds } | ||||
|       { limit: limit_proc, period: period_proc } | ||||
|  |  | |||
|  | @ -102,10 +102,19 @@ namespace :gitlab do | |||
|       task create: :gitlab_environment do | ||||
|         puts_time "Dumping repositories ...".color(:blue) | ||||
| 
 | ||||
|         max_concurrency = ENV.fetch('GITLAB_BACKUP_MAX_CONCURRENCY', 1).to_i | ||||
|         max_storage_concurrency = ENV.fetch('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 1).to_i | ||||
| 
 | ||||
|         if ENV["SKIP"] && ENV["SKIP"].include?("repositories") | ||||
|           puts_time "[SKIPPED]".color(:cyan) | ||||
|         elsif max_concurrency < 1 || max_storage_concurrency < 1 | ||||
|           puts "GITLAB_BACKUP_MAX_CONCURRENCY and GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY must have a value of at least 1".color(:red) | ||||
|           exit 1 | ||||
|         else | ||||
|           Backup::Repositories.new(progress, strategy: repository_backup_strategy).dump | ||||
|           Backup::Repositories.new(progress, strategy: repository_backup_strategy).dump( | ||||
|             max_concurrency: max_concurrency, | ||||
|             max_storage_concurrency: max_storage_concurrency | ||||
|           ) | ||||
|           puts_time "done".color(:green) | ||||
|         end | ||||
|       end | ||||
|  | @ -290,9 +299,13 @@ namespace :gitlab do | |||
|     end | ||||
| 
 | ||||
|     def repository_backup_strategy | ||||
|       max_concurrency = ENV['GITLAB_BACKUP_MAX_CONCURRENCY'].presence | ||||
|       max_storage_concurrency = ENV['GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY'].presence | ||||
|       Backup::GitalyBackup.new(progress, parallel: max_concurrency, parallel_storage: max_storage_concurrency) | ||||
|       if Feature.enabled?(:gitaly_backup, default_enabled: :yaml) | ||||
|         max_concurrency = ENV['GITLAB_BACKUP_MAX_CONCURRENCY'].presence | ||||
|         max_storage_concurrency = ENV['GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY'].presence | ||||
|         Backup::GitalyBackup.new(progress, parallel: max_concurrency, parallel_storage: max_storage_concurrency) | ||||
|       else | ||||
|         Backup::GitalyRpcBackup.new(progress) | ||||
|       end | ||||
|     end | ||||
|   end | ||||
|   # namespace end: backup | ||||
|  |  | |||
|  | @ -17028,9 +17028,6 @@ msgstr "" | |||
| msgid "Import Projects from Gitea" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "Import a project" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "Import an exported GitLab project" | ||||
| msgstr "" | ||||
| 
 | ||||
|  | @ -17115,6 +17112,27 @@ msgstr "" | |||
| msgid "Import timed out. Import took longer than %{import_jobs_expiration} seconds" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ImportAProjectModal|Import from a project" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ImportAProjectModal|Import members from another project" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ImportAProjectModal|Import project members" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ImportAProjectModal|Only project members (not group members) are imported, and they get the same permissions as the project you import from." | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ImportAProjectModal|Successfully imported" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ImportAProjectModal|Unable to import project members" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ImportAProjectModal|You're importing members to the %{strongStart}%{name}%{strongEnd} project." | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ImportButtons|Connect repositories from" | ||||
| msgstr "" | ||||
| 
 | ||||
|  | @ -26215,9 +26233,21 @@ msgstr "" | |||
| msgid "ProjectSelect| or group" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ProjectSelect|No matching results" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ProjectSelect|Search for project" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ProjectSelect|Search projects" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ProjectSelect|Select a project" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ProjectSelect|There was an error fetching the projects. Please try again." | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "ProjectService|Drone server URL" | ||||
| msgstr "" | ||||
| 
 | ||||
|  |  | |||
|  | @ -43,10 +43,15 @@ RSpec.describe 'Projects > Settings > User manages project members' do | |||
| 
 | ||||
|     visit(project_project_members_path(project)) | ||||
| 
 | ||||
|     click_link('Import a project') | ||||
|     click_on 'Import from a project' | ||||
|     click_on 'Select a project' | ||||
|     wait_for_requests | ||||
| 
 | ||||
|     select2(project2.id, from: '#source_project_id') | ||||
|     click_button('Import project members') | ||||
|     click_button project2.name | ||||
|     click_button 'Import project members' | ||||
|     wait_for_requests | ||||
| 
 | ||||
|     page.refresh | ||||
| 
 | ||||
|     expect(find_member_row(user_mike)).to have_content('Reporter') | ||||
|   end | ||||
|  |  | |||
|  | @ -0,0 +1,62 @@ | |||
| import MockAdapter from 'axios-mock-adapter'; | ||||
| import * as projectsApi from '~/api/projects_api'; | ||||
| import axios from '~/lib/utils/axios_utils'; | ||||
| 
 | ||||
| describe('~/api/projects_api.js', () => { | ||||
|   let mock; | ||||
|   let originalGon; | ||||
| 
 | ||||
|   const projectId = 1; | ||||
| 
 | ||||
|   beforeEach(() => { | ||||
|     mock = new MockAdapter(axios); | ||||
| 
 | ||||
|     originalGon = window.gon; | ||||
|     window.gon = { api_version: 'v7' }; | ||||
|   }); | ||||
| 
 | ||||
|   afterEach(() => { | ||||
|     mock.restore(); | ||||
|     window.gon = originalGon; | ||||
|   }); | ||||
| 
 | ||||
|   describe('getProjects', () => { | ||||
|     beforeEach(() => { | ||||
|       jest.spyOn(axios, 'get'); | ||||
|     }); | ||||
| 
 | ||||
|     it('retrieves projects from the correct URL and returns them in the response data', () => { | ||||
|       const expectedUrl = '/api/v7/projects.json'; | ||||
|       const expectedParams = { params: { per_page: 20, search: '', simple: true } }; | ||||
|       const expectedProjects = [{ name: 'project 1' }]; | ||||
|       const query = ''; | ||||
|       const options = {}; | ||||
| 
 | ||||
|       mock.onGet(expectedUrl).reply(200, { data: expectedProjects }); | ||||
| 
 | ||||
|       return projectsApi.getProjects(query, options).then(({ data }) => { | ||||
|         expect(axios.get).toHaveBeenCalledWith(expectedUrl, expectedParams); | ||||
|         expect(data.data).toEqual(expectedProjects); | ||||
|       }); | ||||
|     }); | ||||
|   }); | ||||
| 
 | ||||
|   describe('importProjectMembers', () => { | ||||
|     beforeEach(() => { | ||||
|       jest.spyOn(axios, 'post'); | ||||
|     }); | ||||
| 
 | ||||
|     it('posts to the correct URL and returns the response message', () => { | ||||
|       const targetId = 2; | ||||
|       const expectedUrl = '/api/v7/projects/1/import_project_members/2'; | ||||
|       const expectedMessage = 'Successfully imported'; | ||||
| 
 | ||||
|       mock.onPost(expectedUrl).replyOnce(200, expectedMessage); | ||||
| 
 | ||||
|       return projectsApi.importProjectMembers(projectId, targetId).then(({ data }) => { | ||||
|         expect(axios.post).toHaveBeenCalledWith(expectedUrl); | ||||
|         expect(data).toEqual(expectedMessage); | ||||
|       }); | ||||
|     }); | ||||
|   }); | ||||
| }); | ||||
|  | @ -0,0 +1,167 @@ | |||
| import { GlFormGroup, GlSprintf, GlModal } from '@gitlab/ui'; | ||||
| import MockAdapter from 'axios-mock-adapter'; | ||||
| import { stubComponent } from 'helpers/stub_component'; | ||||
| import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; | ||||
| import waitForPromises from 'helpers/wait_for_promises'; | ||||
| import * as ProjectsApi from '~/api/projects_api'; | ||||
| import ImportAProjectModal from '~/invite_members/components/import_a_project_modal.vue'; | ||||
| import ProjectSelect from '~/invite_members/components/project_select.vue'; | ||||
| import axios from '~/lib/utils/axios_utils'; | ||||
| 
 | ||||
| let wrapper; | ||||
| let mock; | ||||
| 
 | ||||
| const projectId = '1'; | ||||
| const projectName = 'test name'; | ||||
| const projectToBeImported = { id: '2' }; | ||||
| const $toast = { | ||||
|   show: jest.fn(), | ||||
| }; | ||||
| 
 | ||||
| const createComponent = () => { | ||||
|   wrapper = shallowMountExtended(ImportAProjectModal, { | ||||
|     propsData: { | ||||
|       projectId, | ||||
|       projectName, | ||||
|     }, | ||||
|     stubs: { | ||||
|       GlModal: stubComponent(GlModal, { | ||||
|         template: | ||||
|           '<div><slot name="modal-title"></slot><slot></slot><slot name="modal-footer"></slot></div>', | ||||
|       }), | ||||
|       GlSprintf, | ||||
|       GlFormGroup: stubComponent(GlFormGroup, { | ||||
|         props: ['state', 'invalidFeedback'], | ||||
|       }), | ||||
|     }, | ||||
|     mocks: { | ||||
|       $toast, | ||||
|     }, | ||||
|   }); | ||||
| }; | ||||
| 
 | ||||
| beforeEach(() => { | ||||
|   gon.api_version = 'v4'; | ||||
|   mock = new MockAdapter(axios); | ||||
| }); | ||||
| 
 | ||||
| afterEach(() => { | ||||
|   wrapper.destroy(); | ||||
|   mock.restore(); | ||||
| }); | ||||
| 
 | ||||
| describe('ImportAProjectModal', () => { | ||||
|   const findIntroText = () => wrapper.find({ ref: 'modalIntro' }).text(); | ||||
|   const findCancelButton = () => wrapper.findByTestId('cancel-button'); | ||||
|   const findImportButton = () => wrapper.findByTestId('import-button'); | ||||
|   const clickImportButton = () => findImportButton().vm.$emit('click'); | ||||
|   const clickCancelButton = () => findCancelButton().vm.$emit('click'); | ||||
|   const findFormGroup = () => wrapper.findByTestId('form-group'); | ||||
|   const formGroupInvalidFeedback = () => findFormGroup().props('invalidFeedback'); | ||||
|   const formGroupErrorState = () => findFormGroup().props('state'); | ||||
|   const findProjectSelect = () => wrapper.findComponent(ProjectSelect); | ||||
| 
 | ||||
|   describe('rendering the modal', () => { | ||||
|     beforeEach(() => { | ||||
|       createComponent(); | ||||
|     }); | ||||
| 
 | ||||
|     it('renders the modal with the correct title', () => { | ||||
|       expect(wrapper.findComponent(GlModal).props('title')).toBe( | ||||
|         'Import members from another project', | ||||
|       ); | ||||
|     }); | ||||
| 
 | ||||
|     it('renders the Cancel button text correctly', () => { | ||||
|       expect(findCancelButton().text()).toBe('Cancel'); | ||||
|     }); | ||||
| 
 | ||||
|     it('renders the Import button text correctly', () => { | ||||
|       expect(findImportButton().text()).toBe('Import project members'); | ||||
|     }); | ||||
| 
 | ||||
|     it('renders the modal intro text correctly', () => { | ||||
|       expect(findIntroText()).toBe("You're importing members to the test name project."); | ||||
|     }); | ||||
| 
 | ||||
|     it('renders the Import button modal without isLoading', () => { | ||||
|       expect(findImportButton().props('loading')).toBe(false); | ||||
|     }); | ||||
| 
 | ||||
|     it('sets isLoading to true when the Invite button is clicked', async () => { | ||||
|       clickImportButton(); | ||||
| 
 | ||||
|       await wrapper.vm.$nextTick(); | ||||
| 
 | ||||
|       expect(findImportButton().props('loading')).toBe(true); | ||||
|     }); | ||||
|   }); | ||||
| 
 | ||||
|   describe('submitting the import form', () => { | ||||
|     describe('when the import is successful', () => { | ||||
|       beforeEach(() => { | ||||
|         createComponent(); | ||||
| 
 | ||||
|         findProjectSelect().vm.$emit('input', projectToBeImported); | ||||
| 
 | ||||
|         jest.spyOn(ProjectsApi, 'importProjectMembers').mockResolvedValue(); | ||||
| 
 | ||||
|         clickImportButton(); | ||||
|       }); | ||||
| 
 | ||||
|       it('calls Api importProjectMembers', () => { | ||||
|         expect(ProjectsApi.importProjectMembers).toHaveBeenCalledWith( | ||||
|           projectId, | ||||
|           projectToBeImported.id, | ||||
|         ); | ||||
|       }); | ||||
| 
 | ||||
|       it('displays the successful toastMessage', () => { | ||||
|         expect($toast.show).toHaveBeenCalledWith( | ||||
|           'Successfully imported', | ||||
|           wrapper.vm.$options.toastOptions, | ||||
|         ); | ||||
|       }); | ||||
| 
 | ||||
|       it('sets isLoading to false after success', () => { | ||||
|         expect(findImportButton().props('loading')).toBe(false); | ||||
|       }); | ||||
|     }); | ||||
| 
 | ||||
|     describe('when the import fails', () => { | ||||
|       beforeEach(async () => { | ||||
|         createComponent(); | ||||
| 
 | ||||
|         findProjectSelect().vm.$emit('input', projectToBeImported); | ||||
| 
 | ||||
|         jest | ||||
|           .spyOn(ProjectsApi, 'importProjectMembers') | ||||
|           .mockRejectedValue({ response: { data: { success: false } } }); | ||||
| 
 | ||||
|         clickImportButton(); | ||||
|         await waitForPromises(); | ||||
|       }); | ||||
| 
 | ||||
|       it('displays the generic error message', () => { | ||||
|         expect(formGroupInvalidFeedback()).toBe('Unable to import project members'); | ||||
|         expect(formGroupErrorState()).toBe(false); | ||||
|       }); | ||||
| 
 | ||||
|       it('sets isLoading to false after error', () => { | ||||
|         expect(findImportButton().props('loading')).toBe(false); | ||||
|       }); | ||||
| 
 | ||||
|       it('clears the error when the modal is closed with an error', async () => { | ||||
|         expect(formGroupInvalidFeedback()).toBe('Unable to import project members'); | ||||
|         expect(formGroupErrorState()).toBe(false); | ||||
| 
 | ||||
|         clickCancelButton(); | ||||
| 
 | ||||
|         await wrapper.vm.$nextTick(); | ||||
| 
 | ||||
|         expect(formGroupInvalidFeedback()).toBe(''); | ||||
|         expect(formGroupErrorState()).not.toBe(false); | ||||
|       }); | ||||
|     }); | ||||
|   }); | ||||
| }); | ||||
|  | @ -0,0 +1,105 @@ | |||
| import { GlSearchBoxByType, GlAvatarLabeled, GlDropdownItem } from '@gitlab/ui'; | ||||
| import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; | ||||
| import waitForPromises from 'helpers/wait_for_promises'; | ||||
| import * as projectsApi from '~/api/projects_api'; | ||||
| import ProjectSelect from '~/invite_members/components/project_select.vue'; | ||||
| import { allProjects, project1 } from '../mock_data/api_response_data'; | ||||
| 
 | ||||
| describe('ProjectSelect', () => { | ||||
|   let wrapper; | ||||
| 
 | ||||
|   const createComponent = () => { | ||||
|     wrapper = shallowMountExtended(ProjectSelect, {}); | ||||
|   }; | ||||
| 
 | ||||
|   beforeEach(() => { | ||||
|     jest.spyOn(projectsApi, 'getProjects').mockResolvedValue(allProjects); | ||||
| 
 | ||||
|     createComponent(); | ||||
|   }); | ||||
| 
 | ||||
|   afterEach(() => { | ||||
|     wrapper.destroy(); | ||||
|   }); | ||||
| 
 | ||||
|   const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType); | ||||
|   const findDropdownItem = (index) => wrapper.findAllComponents(GlDropdownItem).at(index); | ||||
|   const findAvatarLabeled = (index) => findDropdownItem(index).findComponent(GlAvatarLabeled); | ||||
|   const findEmptyResultMessage = () => wrapper.findByTestId('empty-result-message'); | ||||
|   const findErrorMessage = () => wrapper.findByTestId('error-message'); | ||||
| 
 | ||||
|   it('renders GlSearchBoxByType with default attributes', () => { | ||||
|     expect(findSearchBoxByType().exists()).toBe(true); | ||||
|     expect(findSearchBoxByType().vm.$attrs).toMatchObject({ | ||||
|       placeholder: 'Search projects', | ||||
|     }); | ||||
|   }); | ||||
| 
 | ||||
|   describe('when user types in the search input', () => { | ||||
|     let resolveApiRequest; | ||||
|     let rejectApiRequest; | ||||
| 
 | ||||
|     beforeEach(() => { | ||||
|       jest.spyOn(projectsApi, 'getProjects').mockImplementation( | ||||
|         () => | ||||
|           new Promise((resolve, reject) => { | ||||
|             resolveApiRequest = resolve; | ||||
|             rejectApiRequest = reject; | ||||
|           }), | ||||
|       ); | ||||
| 
 | ||||
|       findSearchBoxByType().vm.$emit('input', project1.name); | ||||
|     }); | ||||
| 
 | ||||
|     it('calls the API', () => { | ||||
|       resolveApiRequest({ data: allProjects }); | ||||
| 
 | ||||
|       expect(projectsApi.getProjects).toHaveBeenCalledWith(project1.name, { | ||||
|         active: true, | ||||
|         exclude_internal: true, | ||||
|       }); | ||||
|     }); | ||||
| 
 | ||||
|     it('displays loading icon while waiting for API call to resolve and then sets loading false', async () => { | ||||
|       expect(findSearchBoxByType().props('isLoading')).toBe(true); | ||||
| 
 | ||||
|       resolveApiRequest({ data: allProjects }); | ||||
|       await waitForPromises(); | ||||
| 
 | ||||
|       expect(findSearchBoxByType().props('isLoading')).toBe(false); | ||||
|       expect(findEmptyResultMessage().exists()).toBe(false); | ||||
|       expect(findErrorMessage().exists()).toBe(false); | ||||
|     }); | ||||
| 
 | ||||
|     it('displays a dropdown item and avatar for each project fetched', async () => { | ||||
|       resolveApiRequest({ data: allProjects }); | ||||
|       await waitForPromises(); | ||||
| 
 | ||||
|       allProjects.forEach((project, index) => { | ||||
|         expect(findDropdownItem(index).attributes('name')).toBe(project.name_with_namespace); | ||||
|         expect(findAvatarLabeled(index).attributes()).toMatchObject({ | ||||
|           src: project.avatar_url, | ||||
|           'entity-id': String(project.id), | ||||
|           'entity-name': project.name_with_namespace, | ||||
|         }); | ||||
|         expect(findAvatarLabeled(index).props('label')).toBe(project.name_with_namespace); | ||||
|       }); | ||||
|     }); | ||||
| 
 | ||||
|     it('displays the empty message when the API results are empty', async () => { | ||||
|       resolveApiRequest({ data: [] }); | ||||
|       await waitForPromises(); | ||||
| 
 | ||||
|       expect(findEmptyResultMessage().text()).toBe('No matching results'); | ||||
|     }); | ||||
| 
 | ||||
|     it('displays the error message when the fetch fails', async () => { | ||||
|       rejectApiRequest(); | ||||
|       await waitForPromises(); | ||||
| 
 | ||||
|       expect(findErrorMessage().text()).toBe( | ||||
|         'There was an error fetching the projects. Please try again.', | ||||
|       ); | ||||
|     }); | ||||
|   }); | ||||
| }); | ||||
|  | @ -0,0 +1,13 @@ | |||
| export const project1 = { | ||||
|   id: 1, | ||||
|   name: 'Project One', | ||||
|   name_with_namespace: 'Project One', | ||||
|   avatar_url: 'test1', | ||||
| }; | ||||
| export const project2 = { | ||||
|   id: 2, | ||||
|   name: 'Project One', | ||||
|   name_with_namespace: 'Project Two', | ||||
|   avatar_url: 'test2', | ||||
| }; | ||||
| export const allProjects = [project1, project2]; | ||||
|  | @ -1,5 +1,6 @@ | |||
| import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui'; | ||||
| import { shallowMount } from '@vue/test-utils'; | ||||
| import { PIPELINE_SOURCES } from 'ee_else_ce/pipelines/components/pipelines_list/tokens/constants'; | ||||
| import { stubComponent } from 'helpers/stub_component'; | ||||
| import PipelineSourceToken from '~/pipelines/components/pipelines_list/tokens/pipeline_source_token.vue'; | ||||
| 
 | ||||
|  | @ -44,7 +45,7 @@ describe('Pipeline Source Token', () => { | |||
| 
 | ||||
|   describe('shows sources correctly', () => { | ||||
|     it('renders all pipeline sources available', () => { | ||||
|       expect(findAllFilteredSearchSuggestions()).toHaveLength(wrapper.vm.sources.length); | ||||
|       expect(findAllFilteredSearchSuggestions()).toHaveLength(PIPELINE_SOURCES.length); | ||||
|     }); | ||||
|   }); | ||||
| }); | ||||
|  |  | |||
|  | @ -0,0 +1,72 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Mutations::CustomerRelations::Organizations::Create do | ||||
|   let_it_be(:user) { create(:user) } | ||||
| 
 | ||||
|   let(:group) { create(:group) } | ||||
| 
 | ||||
|   let(:valid_params) do | ||||
|     attributes_for(:organization, | ||||
|       group: group, | ||||
|       description: 'This company is super important!', | ||||
|       default_rate: 1_000 | ||||
|     ) | ||||
|   end | ||||
| 
 | ||||
|   describe 'create organizations mutation' do | ||||
|     describe '#resolve' do | ||||
|       subject(:resolve_mutation) do | ||||
|         described_class.new(object: nil, context: { current_user: user }, field: nil).resolve( | ||||
|           **valid_params, | ||||
|           group_id: group.to_global_id | ||||
|         ) | ||||
|       end | ||||
| 
 | ||||
|       context 'when the user does not have permission' do | ||||
|         before do | ||||
|           group.add_guest(user) | ||||
|         end | ||||
| 
 | ||||
|         it 'raises an error' do | ||||
|           expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable) | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when the user has permission' do | ||||
|         before do | ||||
|           group.add_reporter(user) | ||||
|         end | ||||
| 
 | ||||
|         context 'when the feature is disabled' do | ||||
|           before do | ||||
|             stub_feature_flags(customer_relations: false) | ||||
|           end | ||||
| 
 | ||||
|           it 'raises an error' do | ||||
|             expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable) | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|         context 'when the params are invalid' do | ||||
|           before do | ||||
|             valid_params[:name] = nil | ||||
|           end | ||||
| 
 | ||||
|           it 'returns the validation error' do | ||||
|             expect(resolve_mutation[:errors]).to eq(["Name can't be blank"]) | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|         context 'when the user has permission to create an organization' do | ||||
|           it 'creates organization with correct values' do | ||||
|             expect(resolve_mutation[:organization]).to have_attributes(valid_params) | ||||
|           end | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   specify { expect(described_class).to require_graphql_authorizations(:admin_organization) } | ||||
| end | ||||
|  | @ -0,0 +1,153 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Backup::GitalyRpcBackup do | ||||
|   let(:progress) { spy(:stdout) } | ||||
| 
 | ||||
|   subject { described_class.new(progress) } | ||||
| 
 | ||||
|   after do | ||||
|     # make sure we do not leave behind any backup files | ||||
|     FileUtils.rm_rf(File.join(Gitlab.config.backup.path, 'repositories')) | ||||
|   end | ||||
| 
 | ||||
|   context 'unknown' do | ||||
|     it 'fails to start unknown' do | ||||
|       expect { subject.start(:unknown) }.to raise_error(::Backup::Error, 'unknown backup type: unknown') | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   context 'create' do | ||||
|     RSpec.shared_examples 'creates a repository backup' do | ||||
|       it 'creates repository bundles', :aggregate_failures do | ||||
|         # Add data to the wiki, design repositories, and snippets, so they will be included in the dump. | ||||
|         create(:wiki_page, container: project) | ||||
|         create(:design, :with_file, issue: create(:issue, project: project)) | ||||
|         project_snippet = create(:project_snippet, :repository, project: project) | ||||
|         personal_snippet = create(:personal_snippet, :repository, author: project.owner) | ||||
| 
 | ||||
|         subject.start(:create) | ||||
|         subject.enqueue(project, Gitlab::GlRepository::PROJECT) | ||||
|         subject.enqueue(project, Gitlab::GlRepository::WIKI) | ||||
|         subject.enqueue(project, Gitlab::GlRepository::DESIGN) | ||||
|         subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) | ||||
|         subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) | ||||
|         subject.wait | ||||
| 
 | ||||
|         expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle')) | ||||
|         expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki.bundle')) | ||||
|         expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design.bundle')) | ||||
|         expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', personal_snippet.disk_path + '.bundle')) | ||||
|         expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project_snippet.disk_path + '.bundle')) | ||||
|       end | ||||
| 
 | ||||
|       context 'failure' do | ||||
|         before do | ||||
|           allow_next_instance_of(Repository) do |repository| | ||||
|             allow(repository).to receive(:bundle_to_disk) { raise 'Fail in tests' } | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|         it 'logs an appropriate message', :aggregate_failures do | ||||
|           subject.start(:create) | ||||
|           subject.enqueue(project, Gitlab::GlRepository::PROJECT) | ||||
|           subject.wait | ||||
| 
 | ||||
|           expect(progress).to have_received(:puts).with("[Failed] backing up #{project.full_path} (#{project.disk_path})") | ||||
|           expect(progress).to have_received(:puts).with("Error Fail in tests") | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'hashed storage' do | ||||
|       let_it_be(:project) { create(:project, :repository) } | ||||
| 
 | ||||
|       it_behaves_like 'creates a repository backup' | ||||
|     end | ||||
| 
 | ||||
|     context 'legacy storage' do | ||||
|       let_it_be(:project) { create(:project, :repository, :legacy_storage) } | ||||
| 
 | ||||
|       it_behaves_like 'creates a repository backup' | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   context 'restore' do | ||||
|     let_it_be(:project) { create(:project, :repository) } | ||||
|     let_it_be(:personal_snippet) { create(:personal_snippet, author: project.owner) } | ||||
|     let_it_be(:project_snippet) { create(:project_snippet, project: project, author: project.owner) } | ||||
| 
 | ||||
|     def copy_bundle_to_backup_path(bundle_name, destination) | ||||
|       FileUtils.mkdir_p(File.join(Gitlab.config.backup.path, 'repositories', File.dirname(destination))) | ||||
|       FileUtils.cp(Rails.root.join('spec/fixtures/lib/backup', bundle_name), File.join(Gitlab.config.backup.path, 'repositories', destination)) | ||||
|     end | ||||
| 
 | ||||
|     it 'restores from repository bundles', :aggregate_failures do | ||||
|       copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle') | ||||
|       copy_bundle_to_backup_path('wiki_repo.bundle', project.disk_path + '.wiki.bundle') | ||||
|       copy_bundle_to_backup_path('design_repo.bundle', project.disk_path + '.design.bundle') | ||||
|       copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle') | ||||
|       copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle') | ||||
| 
 | ||||
|       subject.start(:restore) | ||||
|       subject.enqueue(project, Gitlab::GlRepository::PROJECT) | ||||
|       subject.enqueue(project, Gitlab::GlRepository::WIKI) | ||||
|       subject.enqueue(project, Gitlab::GlRepository::DESIGN) | ||||
|       subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) | ||||
|       subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) | ||||
|       subject.wait | ||||
| 
 | ||||
|       collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) } | ||||
| 
 | ||||
|       expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec']) | ||||
|       expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea']) | ||||
|       expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d']) | ||||
|       expect(collect_commit_shas.call(personal_snippet.repository)).to eq(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e']) | ||||
|       expect(collect_commit_shas.call(project_snippet.repository)).to eq(['6e44ba56a4748be361a841e759c20e421a1651a1']) | ||||
|     end | ||||
| 
 | ||||
|     it 'cleans existing repositories', :aggregate_failures do | ||||
|       expect_next_instance_of(DesignManagement::Repository) do |repository| | ||||
|         expect(repository).to receive(:remove) | ||||
|       end | ||||
| 
 | ||||
|       # 4 times = project repo + wiki repo + project_snippet repo + personal_snippet repo | ||||
|       expect(Repository).to receive(:new).exactly(4).times.and_wrap_original do |method, *original_args| | ||||
|         full_path, container, kwargs = original_args | ||||
| 
 | ||||
|         repository = method.call(full_path, container, **kwargs) | ||||
| 
 | ||||
|         expect(repository).to receive(:remove) | ||||
| 
 | ||||
|         repository | ||||
|       end | ||||
| 
 | ||||
|       subject.start(:restore) | ||||
|       subject.enqueue(project, Gitlab::GlRepository::PROJECT) | ||||
|       subject.enqueue(project, Gitlab::GlRepository::WIKI) | ||||
|       subject.enqueue(project, Gitlab::GlRepository::DESIGN) | ||||
|       subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET) | ||||
|       subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET) | ||||
|       subject.wait | ||||
|     end | ||||
| 
 | ||||
|     context 'failure' do | ||||
|       before do | ||||
|         allow_next_instance_of(Repository) do |repository| | ||||
|           allow(repository).to receive(:create_repository) { raise 'Fail in tests' } | ||||
|           allow(repository).to receive(:create_from_bundle) { raise 'Fail in tests' } | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       it 'logs an appropriate message', :aggregate_failures do | ||||
|         subject.start(:restore) | ||||
|         subject.enqueue(project, Gitlab::GlRepository::PROJECT) | ||||
|         subject.wait | ||||
| 
 | ||||
|         expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})") | ||||
|         expect(progress).to have_received(:puts).with("Error Fail in tests") | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -4,7 +4,8 @@ require 'spec_helper' | |||
| 
 | ||||
| RSpec.describe Backup::Repositories do | ||||
|   let(:progress) { spy(:stdout) } | ||||
|   let(:strategy) { spy(:strategy) } | ||||
|   let(:parallel_enqueue) { true } | ||||
|   let(:strategy) { spy(:strategy, parallel_enqueue?: parallel_enqueue) } | ||||
| 
 | ||||
|   subject { described_class.new(progress, strategy: strategy) } | ||||
| 
 | ||||
|  | @ -16,7 +17,7 @@ RSpec.describe Backup::Repositories do | |||
|         project_snippet = create(:project_snippet, :repository, project: project) | ||||
|         personal_snippet = create(:personal_snippet, :repository, author: project.owner) | ||||
| 
 | ||||
|         subject.dump | ||||
|         subject.dump(max_concurrency: 1, max_storage_concurrency: 1) | ||||
| 
 | ||||
|         expect(strategy).to have_received(:start).with(:create) | ||||
|         expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT) | ||||
|  | @ -40,30 +41,132 @@ RSpec.describe Backup::Repositories do | |||
|       it_behaves_like 'creates repository bundles' | ||||
|     end | ||||
| 
 | ||||
|     context 'command failure' do | ||||
|       it 'enqueue_project raises an error' do | ||||
|         allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError) | ||||
|     context 'no concurrency' do | ||||
|       it 'creates the expected number of threads' do | ||||
|         expect(Thread).not_to receive(:new) | ||||
| 
 | ||||
|         expect { subject.dump }.to raise_error(IOError) | ||||
|         expect(strategy).to receive(:start).with(:create) | ||||
|         projects.each do |project| | ||||
|           expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) | ||||
|         end | ||||
|         expect(strategy).to receive(:wait) | ||||
| 
 | ||||
|         subject.dump(max_concurrency: 1, max_storage_concurrency: 1) | ||||
|       end | ||||
| 
 | ||||
|       it 'project query raises an error' do | ||||
|         allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout) | ||||
|       describe 'command failure' do | ||||
|         it 'enqueue_project raises an error' do | ||||
|           allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError) | ||||
| 
 | ||||
|         expect { subject.dump }.to raise_error(ActiveRecord::StatementTimeout) | ||||
|           expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(IOError) | ||||
|         end | ||||
| 
 | ||||
|         it 'project query raises an error' do | ||||
|           allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout) | ||||
| 
 | ||||
|           expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(ActiveRecord::StatementTimeout) | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       it 'avoids N+1 database queries' do | ||||
|         control_count = ActiveRecord::QueryRecorder.new do | ||||
|           subject.dump(max_concurrency: 1, max_storage_concurrency: 1) | ||||
|         end.count | ||||
| 
 | ||||
|         create_list(:project, 2, :repository) | ||||
| 
 | ||||
|         expect do | ||||
|           subject.dump(max_concurrency: 1, max_storage_concurrency: 1) | ||||
|         end.not_to exceed_query_limit(control_count) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     it 'avoids N+1 database queries' do | ||||
|       control_count = ActiveRecord::QueryRecorder.new do | ||||
|         subject.dump | ||||
|       end.count | ||||
|     context 'concurrency with a strategy without parallel enqueueing support' do | ||||
|       let(:parallel_enqueue) { false } | ||||
| 
 | ||||
|       create_list(:project, 2, :repository) | ||||
|       it 'enqueues all projects sequentially' do | ||||
|         expect(Thread).not_to receive(:new) | ||||
| 
 | ||||
|       expect do | ||||
|         subject.dump | ||||
|       end.not_to exceed_query_limit(control_count) | ||||
|         expect(strategy).to receive(:start).with(:create) | ||||
|         projects.each do |project| | ||||
|           expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) | ||||
|         end | ||||
|         expect(strategy).to receive(:wait) | ||||
| 
 | ||||
|         subject.dump(max_concurrency: 2, max_storage_concurrency: 2) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     [4, 10].each do |max_storage_concurrency| | ||||
|       context "max_storage_concurrency #{max_storage_concurrency}", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/241701' do | ||||
|         let(:storage_keys) { %w[default test_second_storage] } | ||||
| 
 | ||||
|         before do | ||||
|           allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(storage_keys) | ||||
|         end | ||||
| 
 | ||||
|         it 'creates the expected number of threads' do | ||||
|           expect(Thread).to receive(:new) | ||||
|             .exactly(storage_keys.length * (max_storage_concurrency + 1)).times | ||||
|             .and_call_original | ||||
| 
 | ||||
|           expect(strategy).to receive(:start).with(:create) | ||||
|           projects.each do |project| | ||||
|             expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) | ||||
|           end | ||||
|           expect(strategy).to receive(:wait) | ||||
| 
 | ||||
|           subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) | ||||
|         end | ||||
| 
 | ||||
|         it 'creates the expected number of threads with extra max concurrency' do | ||||
|           expect(Thread).to receive(:new) | ||||
|             .exactly(storage_keys.length * (max_storage_concurrency + 1)).times | ||||
|             .and_call_original | ||||
| 
 | ||||
|           expect(strategy).to receive(:start).with(:create) | ||||
|           projects.each do |project| | ||||
|             expect(strategy).to receive(:enqueue).with(project, Gitlab::GlRepository::PROJECT) | ||||
|           end | ||||
|           expect(strategy).to receive(:wait) | ||||
| 
 | ||||
|           subject.dump(max_concurrency: 3, max_storage_concurrency: max_storage_concurrency) | ||||
|         end | ||||
| 
 | ||||
|         describe 'command failure' do | ||||
|           it 'enqueue_project raises an error' do | ||||
|             allow(strategy).to receive(:enqueue).and_raise(IOError) | ||||
| 
 | ||||
|             expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(IOError) | ||||
|           end | ||||
| 
 | ||||
|           it 'project query raises an error' do | ||||
|             allow(Project).to receive_message_chain(:for_repository_storage, :includes, :find_each).and_raise(ActiveRecord::StatementTimeout) | ||||
| 
 | ||||
|             expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(ActiveRecord::StatementTimeout) | ||||
|           end | ||||
| 
 | ||||
|           context 'misconfigured storages' do | ||||
|             let(:storage_keys) { %w[test_second_storage] } | ||||
| 
 | ||||
|             it 'raises an error' do | ||||
|               expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured') | ||||
|             end | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|         it 'avoids N+1 database queries' do | ||||
|           control_count = ActiveRecord::QueryRecorder.new do | ||||
|             subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) | ||||
|           end.count | ||||
| 
 | ||||
|           create_list(:project, 2, :repository) | ||||
| 
 | ||||
|           expect do | ||||
|             subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) | ||||
|           end.not_to exceed_query_limit(control_count) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|  |  | |||
|  | @ -0,0 +1,40 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Gitlab::Middleware::SidekiqWebStatic do | ||||
|   let(:app) { double(:app) } | ||||
|   let(:middleware) { described_class.new(app) } | ||||
|   let(:env) { {} } | ||||
| 
 | ||||
|   describe '#call' do | ||||
|     before do | ||||
|       env['HTTP_X_SENDFILE_TYPE'] = 'X-Sendfile' | ||||
|       env['PATH_INFO'] = path | ||||
|     end | ||||
| 
 | ||||
|     context 'with an /admin/sidekiq route' do | ||||
|       let(:path) { '/admin/sidekiq/javascripts/application.js'} | ||||
| 
 | ||||
|       it 'deletes the HTTP_X_SENDFILE_TYPE header' do | ||||
|         expect(app).to receive(:call) | ||||
| 
 | ||||
|         middleware.call(env) | ||||
| 
 | ||||
|         expect(env['HTTP_X_SENDFILE_TYPE']).to be_nil | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'with some static asset route' do | ||||
|       let(:path) { '/assets/test.png' } | ||||
| 
 | ||||
|       it 'keeps the HTTP_X_SENDFILE_TYPE header' do | ||||
|         expect(app).to receive(:call) | ||||
| 
 | ||||
|         middleware.call(env) | ||||
| 
 | ||||
|         expect(env['HTTP_X_SENDFILE_TYPE']).to eq('X-Sendfile') | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -10,12 +10,19 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do | |||
| 
 | ||||
|     let(:throttles) do | ||||
|       { | ||||
|         throttle_unauthenticated: Gitlab::Throttle.unauthenticated_options, | ||||
|         throttle_unauthenticated_api: Gitlab::Throttle.unauthenticated_api_options, | ||||
|         throttle_unauthenticated_web: Gitlab::Throttle.unauthenticated_web_options, | ||||
|         throttle_authenticated_api: Gitlab::Throttle.authenticated_api_options, | ||||
|         throttle_product_analytics_collector: { limit: 100, period: 60 }, | ||||
|         throttle_unauthenticated_protected_paths: Gitlab::Throttle.unauthenticated_options, | ||||
|         throttle_authenticated_protected_paths_api: Gitlab::Throttle.authenticated_api_options, | ||||
|         throttle_authenticated_protected_paths_web: Gitlab::Throttle.authenticated_web_options | ||||
|         throttle_authenticated_web: Gitlab::Throttle.authenticated_web_options, | ||||
|         throttle_unauthenticated_protected_paths: Gitlab::Throttle.protected_paths_options, | ||||
|         throttle_authenticated_protected_paths_api: Gitlab::Throttle.protected_paths_options, | ||||
|         throttle_authenticated_protected_paths_web: Gitlab::Throttle.protected_paths_options, | ||||
|         throttle_unauthenticated_packages_api: Gitlab::Throttle.unauthenticated_packages_api_options, | ||||
|         throttle_authenticated_packages_api: Gitlab::Throttle.authenticated_packages_api_options, | ||||
|         throttle_authenticated_git_lfs: Gitlab::Throttle.throttle_authenticated_git_lfs_options, | ||||
|         throttle_unauthenticated_files_api: Gitlab::Throttle.unauthenticated_files_api_options, | ||||
|         throttle_authenticated_files_api: Gitlab::Throttle.authenticated_files_api_options | ||||
|       } | ||||
|     end | ||||
| 
 | ||||
|  | @ -84,6 +91,15 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do | |||
|       end | ||||
|     end | ||||
| 
 | ||||
|     it 'enables dry-runs for `throttle_unauthenticated_api` and `throttle_unauthenticated_web` when selecting `throttle_unauthenticated`' do | ||||
|       stub_env('GITLAB_THROTTLE_DRY_RUN', 'throttle_unauthenticated') | ||||
| 
 | ||||
|       described_class.configure(fake_rack_attack) | ||||
| 
 | ||||
|       expect(fake_rack_attack).to have_received(:track).with('throttle_unauthenticated_api', throttles[:throttle_unauthenticated_api]) | ||||
|       expect(fake_rack_attack).to have_received(:track).with('throttle_unauthenticated_web', throttles[:throttle_unauthenticated_web]) | ||||
|     end | ||||
| 
 | ||||
|     context 'user allowlist' do | ||||
|       subject { described_class.user_allowlist } | ||||
| 
 | ||||
|  |  | |||
|  | @ -8,37 +8,55 @@ RSpec.describe DesignManagement::Action do | |||
|   end | ||||
| 
 | ||||
|   describe 'scopes' do | ||||
|     describe '.most_recent' do | ||||
|       let_it_be(:design_a) { create(:design) } | ||||
|       let_it_be(:design_b) { create(:design) } | ||||
|       let_it_be(:design_c) { create(:design) } | ||||
|     let_it_be(:issue) { create(:issue) } | ||||
|     let_it_be(:design_a) { create(:design, issue: issue) } | ||||
|     let_it_be(:design_b) { create(:design, issue: issue) } | ||||
| 
 | ||||
|       let(:designs) { [design_a, design_b, design_c] } | ||||
|     context 'with 3 designs' do | ||||
|       let_it_be(:design_c) { create(:design, issue: issue) } | ||||
| 
 | ||||
|       before_all do | ||||
|         create(:design_version, designs: [design_a, design_b, design_c]) | ||||
|         create(:design_version, designs: [design_a, design_b]) | ||||
|         create(:design_version, designs: [design_a]) | ||||
|       let_it_be(:action_a_1) { create(:design_action, design: design_a) } | ||||
|       let_it_be(:action_a_2) { create(:design_action, design: design_a, event: :deletion) } | ||||
|       let_it_be(:action_b)   { create(:design_action, design: design_b) } | ||||
|       let_it_be(:action_c)   { create(:design_action, design: design_c, event: :deletion) } | ||||
| 
 | ||||
|       describe '.most_recent' do | ||||
|         let(:designs) { [design_a, design_b, design_c] } | ||||
| 
 | ||||
|         before_all do | ||||
|           create(:design_version, designs: [design_a, design_b, design_c]) | ||||
|           create(:design_version, designs: [design_a, design_b]) | ||||
|           create(:design_version, designs: [design_a]) | ||||
|         end | ||||
| 
 | ||||
|         it 'finds the correct version for each design' do | ||||
|           dvs = described_class.where(design: designs) | ||||
| 
 | ||||
|           expected = designs | ||||
|             .map(&:id) | ||||
|             .zip(dvs.order("version_id DESC").pluck(:version_id).uniq) | ||||
| 
 | ||||
|           actual = dvs.most_recent.map { |dv| [dv.design_id, dv.version_id] } | ||||
| 
 | ||||
|           expect(actual).to eq(expected) | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       it 'finds the correct version for each design' do | ||||
|         dvs = described_class.where(design: designs) | ||||
|       describe '.by_design' do | ||||
|         it 'returns the actions by design_id' do | ||||
|           expect(described_class.by_design([design_a.id, design_b.id])) | ||||
|             .to match_array([action_a_1, action_a_2, action_b]) | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|         expected = designs | ||||
|           .map(&:id) | ||||
|           .zip(dvs.order("version_id DESC").pluck(:version_id).uniq) | ||||
| 
 | ||||
|         actual = dvs.most_recent.map { |dv| [dv.design_id, dv.version_id] } | ||||
| 
 | ||||
|         expect(actual).to eq(expected) | ||||
|       describe '.by_event' do | ||||
|         it 'returns the actions by event type' do | ||||
|           expect(described_class.by_event(:deletion)).to match_array([action_a_2, action_c]) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     describe '.up_to_version' do | ||||
|       let_it_be(:issue) { create(:issue) } | ||||
|       let_it_be(:design_a) { create(:design, issue: issue) } | ||||
|       let_it_be(:design_b) { create(:design, issue: issue) } | ||||
| 
 | ||||
|       # let bindings are not available in before(:all) contexts, | ||||
|       # so we need to redefine the array on each construction. | ||||
|       let_it_be(:oldest) { create(:design_version, designs: [design_a, design_b]) } | ||||
|  |  | |||
|  | @ -7,6 +7,30 @@ RSpec.describe API::ErrorTrackingCollector do | |||
|   let_it_be(:setting) { create(:project_error_tracking_setting, :integrated, project: project) } | ||||
|   let_it_be(:client_key) { create(:error_tracking_client_key, project: project) } | ||||
| 
 | ||||
|   RSpec.shared_examples 'not found' do | ||||
|     it 'reponds with 404' do | ||||
|       subject | ||||
| 
 | ||||
|       expect(response).to have_gitlab_http_status(:not_found) | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   RSpec.shared_examples 'bad request' do | ||||
|     it 'responds with 400' do | ||||
|       subject | ||||
| 
 | ||||
|       expect(response).to have_gitlab_http_status(:bad_request) | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   RSpec.shared_examples 'successful request' do | ||||
|     it 'writes to the database and returns no content' do | ||||
|       expect { subject }.to change { ErrorTracking::ErrorEvent.count }.by(1) | ||||
| 
 | ||||
|       expect(response).to have_gitlab_http_status(:no_content) | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe "POST /error_tracking/collector/api/:id/envelope" do | ||||
|     let_it_be(:raw_event) { fixture_file('error_tracking/event.txt') } | ||||
|     let_it_be(:url) { "/error_tracking/collector/api/#{project.id}/envelope" } | ||||
|  | @ -16,22 +40,6 @@ RSpec.describe API::ErrorTrackingCollector do | |||
| 
 | ||||
|     subject { post api(url), params: params, headers: headers } | ||||
| 
 | ||||
|     RSpec.shared_examples 'not found' do | ||||
|       it 'reponds with 404' do | ||||
|         subject | ||||
| 
 | ||||
|         expect(response).to have_gitlab_http_status(:not_found) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     RSpec.shared_examples 'bad request' do | ||||
|       it 'responds with 400' do | ||||
|         subject | ||||
| 
 | ||||
|         expect(response).to have_gitlab_http_status(:bad_request) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'error tracking feature is disabled' do | ||||
|       before do | ||||
|         setting.update!(enabled: false) | ||||
|  | @ -88,10 +96,53 @@ RSpec.describe API::ErrorTrackingCollector do | |||
|       end | ||||
|     end | ||||
| 
 | ||||
|     it 'writes to the database and returns no content' do | ||||
|       expect { subject }.to change { ErrorTracking::ErrorEvent.count }.by(1) | ||||
|     it_behaves_like 'successful request' | ||||
|   end | ||||
| 
 | ||||
|       expect(response).to have_gitlab_http_status(:no_content) | ||||
|   describe "POST /error_tracking/collector/api/:id/store" do | ||||
|     let_it_be(:raw_event) { fixture_file('error_tracking/parsed_event.json') } | ||||
|     let_it_be(:url) { "/error_tracking/collector/api/#{project.id}/store" } | ||||
| 
 | ||||
|     let(:params) { raw_event } | ||||
|     let(:headers) { { 'X-Sentry-Auth' => "Sentry sentry_key=#{client_key.public_key}" } } | ||||
| 
 | ||||
|     subject { post api(url), params: params, headers: headers } | ||||
| 
 | ||||
|     it_behaves_like 'successful request' | ||||
| 
 | ||||
|     context 'empty headers' do | ||||
|       let(:headers) { {} } | ||||
| 
 | ||||
|       it_behaves_like 'bad request' | ||||
|     end | ||||
| 
 | ||||
|     context 'empty body' do | ||||
|       let(:params) { '' } | ||||
| 
 | ||||
|       it_behaves_like 'bad request' | ||||
|     end | ||||
| 
 | ||||
|     context 'sentry_key as param and empty headers' do | ||||
|       let(:url) { "/error_tracking/collector/api/#{project.id}/store?sentry_key=#{sentry_key}" } | ||||
|       let(:headers) { {} } | ||||
| 
 | ||||
|       context 'key is wrong' do | ||||
|         let(:sentry_key) { 'glet_1fedb514e17f4b958435093deb02048c' } | ||||
| 
 | ||||
|         it_behaves_like 'not found' | ||||
|       end | ||||
| 
 | ||||
|       context 'key is empty' do | ||||
|         let(:sentry_key) { '' } | ||||
| 
 | ||||
|         it_behaves_like 'bad request' | ||||
|       end | ||||
| 
 | ||||
|       context 'key is correct' do | ||||
|         let(:sentry_key) { client_key.public_key } | ||||
| 
 | ||||
|         it_behaves_like 'successful request' | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  |  | |||
|  | @ -2602,15 +2602,13 @@ RSpec.describe API::Users do | |||
|         let(:api_user) { admin } | ||||
| 
 | ||||
|         context 'for a deactivated user' do | ||||
|           before do | ||||
|             user.deactivate | ||||
|           end | ||||
|           let(:user_id) { deactivated_user.id } | ||||
| 
 | ||||
|           it 'activates a deactivated user' do | ||||
|             activate | ||||
| 
 | ||||
|             expect(response).to have_gitlab_http_status(:created) | ||||
|             expect(user.reload.state).to eq('active') | ||||
|             expect(deactivated_user.reload.state).to eq('active') | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|  | @ -2714,15 +2712,13 @@ RSpec.describe API::Users do | |||
|         end | ||||
| 
 | ||||
|         context 'for a deactivated user' do | ||||
|           before do | ||||
|             user.deactivate | ||||
|           end | ||||
|           let(:user_id) { deactivated_user.id } | ||||
| 
 | ||||
|           it 'returns 201' do | ||||
|             deactivate | ||||
| 
 | ||||
|             expect(response).to have_gitlab_http_status(:created) | ||||
|             expect(user.reload.state).to eq('deactivated') | ||||
|             expect(deactivated_user.reload.state).to eq('deactivated') | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|  | @ -2791,7 +2787,6 @@ RSpec.describe API::Users do | |||
|     describe 'POST /users/:id/approve' do | ||||
|       subject(:approve) { post api("/users/#{user_id}/approve", api_user) } | ||||
| 
 | ||||
|       let_it_be(:deactivated_user) { create(:user, :deactivated) } | ||||
|       let_it_be(:blocked_user) { create(:user, :blocked) } | ||||
| 
 | ||||
|       context 'performed by a non-admin user' do | ||||
|  |  | |||
|  | @ -11,6 +11,8 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac | |||
|   # the right settings are being exercised | ||||
|   let(:settings_to_set) do | ||||
|     { | ||||
|       throttle_unauthenticated_api_requests_per_period: 100, | ||||
|       throttle_unauthenticated_api_period_in_seconds: 1, | ||||
|       throttle_unauthenticated_requests_per_period: 100, | ||||
|       throttle_unauthenticated_period_in_seconds: 1, | ||||
|       throttle_authenticated_api_requests_per_period: 100, | ||||
|  | @ -39,186 +41,21 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac | |||
| 
 | ||||
|   include_context 'rack attack cache store' | ||||
| 
 | ||||
|   describe 'unauthenticated requests' do | ||||
|     let(:url_that_does_not_require_authentication) { '/users/sign_in' } | ||||
|     let(:url_api_internal) { '/api/v4/internal/check' } | ||||
| 
 | ||||
|     before do | ||||
|       # Disabling protected paths throttle, otherwise requests to | ||||
|       # '/users/sign_in' are caught by this throttle. | ||||
|       settings_to_set[:throttle_protected_paths_enabled] = false | ||||
| 
 | ||||
|       # Set low limits | ||||
|       settings_to_set[:throttle_unauthenticated_requests_per_period] = requests_per_period | ||||
|       settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds | ||||
|   describe 'unauthenticated API requests' do | ||||
|     it_behaves_like 'rate-limited unauthenticated requests' do | ||||
|       let(:throttle_name) { 'throttle_unauthenticated_api' } | ||||
|       let(:throttle_setting_prefix) { 'throttle_unauthenticated_api' } | ||||
|       let(:url_that_does_not_require_authentication) { '/api/v4/projects' } | ||||
|       let(:url_that_is_not_matched) { '/users/sign_in' } | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|     context 'when the throttle is enabled' do | ||||
|       before do | ||||
|         settings_to_set[:throttle_unauthenticated_enabled] = true | ||||
|         stub_application_setting(settings_to_set) | ||||
|       end | ||||
| 
 | ||||
|       it 'rejects requests over the rate limit' do | ||||
|         # At first, allow requests under the rate limit. | ||||
|         requests_per_period.times do | ||||
|           get url_that_does_not_require_authentication | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
| 
 | ||||
|         # the last straw | ||||
|         expect_rejection { get url_that_does_not_require_authentication } | ||||
|       end | ||||
| 
 | ||||
|       context 'with custom response text' do | ||||
|         before do | ||||
|           stub_application_setting(rate_limiting_response_text: 'Custom response') | ||||
|         end | ||||
| 
 | ||||
|         it 'rejects requests over the rate limit' do | ||||
|           # At first, allow requests under the rate limit. | ||||
|           requests_per_period.times do | ||||
|             get url_that_does_not_require_authentication | ||||
|             expect(response).to have_gitlab_http_status(:ok) | ||||
|           end | ||||
| 
 | ||||
|           # the last straw | ||||
|           expect_rejection { get url_that_does_not_require_authentication } | ||||
|           expect(response.body).to eq("Custom response\n") | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       it 'allows requests after throttling and then waiting for the next period' do | ||||
|         requests_per_period.times do | ||||
|           get url_that_does_not_require_authentication | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
| 
 | ||||
|         expect_rejection { get url_that_does_not_require_authentication } | ||||
| 
 | ||||
|         travel_to(period.from_now) do | ||||
|           requests_per_period.times do | ||||
|             get url_that_does_not_require_authentication | ||||
|             expect(response).to have_gitlab_http_status(:ok) | ||||
|           end | ||||
| 
 | ||||
|           expect_rejection { get url_that_does_not_require_authentication } | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       it 'counts requests from different IPs separately' do | ||||
|         requests_per_period.times do | ||||
|           get url_that_does_not_require_authentication | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
| 
 | ||||
|         expect_next_instance_of(Rack::Attack::Request) do |instance| | ||||
|           expect(instance).to receive(:ip).at_least(:once).and_return('1.2.3.4') | ||||
|         end | ||||
| 
 | ||||
|         # would be over limit for the same IP | ||||
|         get url_that_does_not_require_authentication | ||||
|         expect(response).to have_gitlab_http_status(:ok) | ||||
|       end | ||||
| 
 | ||||
|       context 'when the request is to the api internal endpoints' do | ||||
|         it 'allows requests over the rate limit' do | ||||
|           (1 + requests_per_period).times do | ||||
|             get url_api_internal, params: { secret_token: Gitlab::Shell.secret_token } | ||||
|             expect(response).to have_gitlab_http_status(:ok) | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when the request is authenticated by a runner token' do | ||||
|         let(:request_jobs_url) { '/api/v4/jobs/request' } | ||||
|         let(:runner) { create(:ci_runner) } | ||||
| 
 | ||||
|         it 'does not count as unauthenticated' do | ||||
|           (1 + requests_per_period).times do | ||||
|             post request_jobs_url, params: { token: runner.token } | ||||
|             expect(response).to have_gitlab_http_status(:no_content) | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when the request is to a health endpoint' do | ||||
|         let(:health_endpoint) { '/-/metrics' } | ||||
| 
 | ||||
|         it 'does not throttle the requests' do | ||||
|           (1 + requests_per_period).times do | ||||
|             get health_endpoint | ||||
|             expect(response).to have_gitlab_http_status(:ok) | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when the request is to a container registry notification endpoint' do | ||||
|         let(:secret_token) { 'secret_token' } | ||||
|         let(:events) { [{ action: 'push' }] } | ||||
|         let(:registry_endpoint) { '/api/v4/container_registry_event/events' } | ||||
|         let(:registry_headers) { { 'Content-Type' => ::API::ContainerRegistryEvent::DOCKER_DISTRIBUTION_EVENTS_V1_JSON } } | ||||
| 
 | ||||
|         before do | ||||
|           allow(Gitlab.config.registry).to receive(:notification_secret) { secret_token } | ||||
| 
 | ||||
|           event = spy(:event) | ||||
|           allow(::ContainerRegistry::Event).to receive(:new).and_return(event) | ||||
|           allow(event).to receive(:supported?).and_return(true) | ||||
|         end | ||||
| 
 | ||||
|         it 'does not throttle the requests' do | ||||
|           (1 + requests_per_period).times do | ||||
|             post registry_endpoint, | ||||
|                  params: { events: events }.to_json, | ||||
|                  headers: registry_headers.merge('Authorization' => secret_token) | ||||
| 
 | ||||
|             expect(response).to have_gitlab_http_status(:ok) | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       it 'logs RackAttack info into structured logs' do | ||||
|         requests_per_period.times do | ||||
|           get url_that_does_not_require_authentication | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
| 
 | ||||
|         arguments = a_hash_including({ | ||||
|           message: 'Rack_Attack', | ||||
|           env: :throttle, | ||||
|           remote_ip: '127.0.0.1', | ||||
|           request_method: 'GET', | ||||
|           path: '/users/sign_in', | ||||
|           matched: 'throttle_unauthenticated' | ||||
|         }) | ||||
| 
 | ||||
|         expect(Gitlab::AuthLogger).to receive(:error).with(arguments) | ||||
| 
 | ||||
|         get url_that_does_not_require_authentication | ||||
|       end | ||||
| 
 | ||||
|       it_behaves_like 'tracking when dry-run mode is set' do | ||||
|         let(:throttle_name) { 'throttle_unauthenticated' } | ||||
| 
 | ||||
|         def do_request | ||||
|           get url_that_does_not_require_authentication | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when the throttle is disabled' do | ||||
|       before do | ||||
|         settings_to_set[:throttle_unauthenticated_enabled] = false | ||||
|         stub_application_setting(settings_to_set) | ||||
|       end | ||||
| 
 | ||||
|       it 'allows requests over the rate limit' do | ||||
|         (1 + requests_per_period).times do | ||||
|           get url_that_does_not_require_authentication | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
|       end | ||||
|   describe 'unauthenticated web requests' do | ||||
|     it_behaves_like 'rate-limited unauthenticated requests' do | ||||
|       let(:throttle_name) { 'throttle_unauthenticated_web' } | ||||
|       let(:throttle_setting_prefix) { 'throttle_unauthenticated' } | ||||
|       let(:url_that_does_not_require_authentication) { '/users/sign_in' } | ||||
|       let(:url_that_is_not_matched) { '/api/v4/projects' } | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|  | @ -479,9 +316,9 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac | |||
| 
 | ||||
|         context 'when unauthenticated api throttle is enabled' do | ||||
|           before do | ||||
|             settings_to_set[:throttle_unauthenticated_requests_per_period] = requests_per_period | ||||
|             settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_enabled] = true | ||||
|             settings_to_set[:throttle_unauthenticated_api_requests_per_period] = requests_per_period | ||||
|             settings_to_set[:throttle_unauthenticated_api_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_api_enabled] = true | ||||
|             stub_application_setting(settings_to_set) | ||||
|           end | ||||
| 
 | ||||
|  | @ -494,6 +331,22 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac | |||
|             expect_rejection { do_request } | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|         context 'when unauthenticated web throttle is enabled' do | ||||
|           before do | ||||
|             settings_to_set[:throttle_unauthenticated_web_requests_per_period] = requests_per_period | ||||
|             settings_to_set[:throttle_unauthenticated_web_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_web_enabled] = true | ||||
|             stub_application_setting(settings_to_set) | ||||
|           end | ||||
| 
 | ||||
|           it 'ignores unauthenticated web throttle' do | ||||
|             (1 + requests_per_period).times do | ||||
|               do_request | ||||
|               expect(response).to have_gitlab_http_status(:ok) | ||||
|             end | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when unauthenticated packages api throttle is enabled' do | ||||
|  | @ -515,9 +368,9 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac | |||
| 
 | ||||
|         context 'when unauthenticated api throttle is lower' do | ||||
|           before do | ||||
|             settings_to_set[:throttle_unauthenticated_requests_per_period] = 0 | ||||
|             settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_enabled] = true | ||||
|             settings_to_set[:throttle_unauthenticated_api_requests_per_period] = 0 | ||||
|             settings_to_set[:throttle_unauthenticated_api_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_api_enabled] = true | ||||
|             stub_application_setting(settings_to_set) | ||||
|           end | ||||
| 
 | ||||
|  | @ -748,9 +601,9 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac | |||
| 
 | ||||
|         context 'when unauthenticated api throttle is enabled' do | ||||
|           before do | ||||
|             settings_to_set[:throttle_unauthenticated_requests_per_period] = requests_per_period | ||||
|             settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_enabled] = true | ||||
|             settings_to_set[:throttle_unauthenticated_api_requests_per_period] = requests_per_period | ||||
|             settings_to_set[:throttle_unauthenticated_api_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_api_enabled] = true | ||||
|             stub_application_setting(settings_to_set) | ||||
|           end | ||||
| 
 | ||||
|  | @ -763,6 +616,22 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac | |||
|             expect_rejection { do_request } | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|         context 'when unauthenticated web throttle is enabled' do | ||||
|           before do | ||||
|             settings_to_set[:throttle_unauthenticated_web_requests_per_period] = requests_per_period | ||||
|             settings_to_set[:throttle_unauthenticated_web_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_web_enabled] = true | ||||
|             stub_application_setting(settings_to_set) | ||||
|           end | ||||
| 
 | ||||
|           it 'ignores unauthenticated web throttle' do | ||||
|             (1 + requests_per_period).times do | ||||
|               do_request | ||||
|               expect(response).to have_gitlab_http_status(:ok) | ||||
|             end | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when unauthenticated files api throttle is enabled' do | ||||
|  | @ -797,9 +666,9 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac | |||
| 
 | ||||
|         context 'when unauthenticated api throttle is lower' do | ||||
|           before do | ||||
|             settings_to_set[:throttle_unauthenticated_requests_per_period] = 0 | ||||
|             settings_to_set[:throttle_unauthenticated_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_enabled] = true | ||||
|             settings_to_set[:throttle_unauthenticated_api_requests_per_period] = 0 | ||||
|             settings_to_set[:throttle_unauthenticated_api_period_in_seconds] = period_in_seconds | ||||
|             settings_to_set[:throttle_unauthenticated_api_enabled] = true | ||||
|             stub_application_setting(settings_to_set) | ||||
|           end | ||||
| 
 | ||||
|  |  | |||
|  | @ -0,0 +1,33 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe CustomerRelations::Organizations::CreateService do | ||||
|   describe '#execute' do | ||||
|     let_it_be(:user) { create(:user) } | ||||
| 
 | ||||
|     let(:group) { create(:group) } | ||||
|     let(:params) { attributes_for(:organization, group: group) } | ||||
| 
 | ||||
|     subject(:response) { described_class.new(group: group, current_user: user, params: params).execute } | ||||
| 
 | ||||
|     it 'creates an organization' do | ||||
|       group.add_reporter(user) | ||||
| 
 | ||||
|       expect(response).to be_success | ||||
|     end | ||||
| 
 | ||||
|     it 'returns an error when user does not have permission' do | ||||
|       expect(response).to be_error | ||||
|       expect(response.message).to eq('You have insufficient permissions to create an organization for this group') | ||||
|     end | ||||
| 
 | ||||
|     it 'returns an error when the organization is not persisted' do | ||||
|       group.add_reporter(user) | ||||
|       params[:name] = nil | ||||
| 
 | ||||
|       expect(response).to be_error | ||||
|       expect(response.message).to eq(["Name can't be blank"]) | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -149,6 +149,12 @@ RSpec.describe DesignManagement::DeleteDesignsService do | |||
|           expect { run_service } | ||||
|             .to change { designs.first.deleted? }.from(false).to(true) | ||||
|         end | ||||
| 
 | ||||
|         it 'schedules deleting todos for that design' do | ||||
|           expect(TodosDestroyer::DestroyedDesignsWorker).to receive(:perform_async).with([designs.first.id]) | ||||
| 
 | ||||
|           run_service | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'more than one design is passed' do | ||||
|  | @ -168,6 +174,12 @@ RSpec.describe DesignManagement::DeleteDesignsService do | |||
|             .and change { Event.destroyed_action.for_design.count }.by(2) | ||||
|         end | ||||
| 
 | ||||
|         it 'schedules deleting todos for that design' do | ||||
|           expect(TodosDestroyer::DestroyedDesignsWorker).to receive(:perform_async).with(designs.map(&:id)) | ||||
| 
 | ||||
|           run_service | ||||
|         end | ||||
| 
 | ||||
|         it_behaves_like "a success" | ||||
| 
 | ||||
|         context 'after executing the service' do | ||||
|  |  | |||
|  | @ -40,5 +40,29 @@ RSpec.describe ErrorTracking::CollectErrorService do | |||
|       expect(event.environment).to eq 'development' | ||||
|       expect(event.payload).to eq parsed_event | ||||
|     end | ||||
| 
 | ||||
|     context 'unusual payload' do | ||||
|       let(:modified_event) { parsed_event } | ||||
| 
 | ||||
|       context 'missing transaction' do | ||||
|         it 'builds actor from stacktrace' do | ||||
|           modified_event.delete('transaction') | ||||
| 
 | ||||
|           event = described_class.new(project, nil, event: modified_event).execute | ||||
| 
 | ||||
|           expect(event.error.actor).to eq 'find()' | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'timestamp is numeric' do | ||||
|         it 'parses timestamp' do | ||||
|           modified_event['timestamp'] = '1631015580.50' | ||||
| 
 | ||||
|           event = described_class.new(project, nil, event: modified_event).execute | ||||
| 
 | ||||
|           expect(event.occurred_at).to eq '2021-09-07T11:53:00.5' | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  |  | |||
|  | @ -0,0 +1,40 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Todos::Destroy::DesignService do | ||||
|   let_it_be(:user)     { create(:user) } | ||||
|   let_it_be(:user_2)   { create(:user) } | ||||
|   let_it_be(:design)   { create(:design) } | ||||
|   let_it_be(:design_2) { create(:design) } | ||||
|   let_it_be(:design_3) { create(:design) } | ||||
| 
 | ||||
|   let_it_be(:create_action)   { create(:design_action, design: design)} | ||||
|   let_it_be(:create_action_2) { create(:design_action, design: design_2)} | ||||
| 
 | ||||
|   describe '#execute' do | ||||
|     before do | ||||
|       create(:todo, user: user, target: design) | ||||
|       create(:todo, user: user_2, target: design) | ||||
|       create(:todo, user: user, target: design_2) | ||||
|       create(:todo, user: user, target: design_3) | ||||
|     end | ||||
| 
 | ||||
|     subject { described_class.new([design.id, design_2.id, design_3.id]).execute } | ||||
| 
 | ||||
|     context 'when the design has been archived' do | ||||
|       let_it_be(:archive_action) { create(:design_action, design: design, event: :deletion)} | ||||
|       let_it_be(:archive_action_2) { create(:design_action, design: design_3, event: :deletion)} | ||||
| 
 | ||||
|       it 'removes todos for that design' do | ||||
|         expect { subject }.to change { Todo.count }.from(4).to(1) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when no design has been archived' do | ||||
|       it 'does not remove any todos' do | ||||
|         expect { subject }.not_to change { Todo.count }.from(4) | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -127,7 +127,6 @@ | |||
| - "./spec/models/clusters/applications/runner_spec.rb" | ||||
| - "./spec/models/deployment_spec.rb" | ||||
| - "./spec/models/environment_spec.rb" | ||||
| - "./spec/models/environment_status_spec.rb" | ||||
| - "./spec/models/merge_request_spec.rb" | ||||
| - "./spec/models/project_spec.rb" | ||||
| - "./spec/models/user_spec.rb" | ||||
|  |  | |||
|  | @ -388,3 +388,194 @@ RSpec.shared_examples 'tracking when dry-run mode is set' do | |||
|     end | ||||
|   end | ||||
| end | ||||
| 
 | ||||
| # Requires let variables: | ||||
| # * throttle_name: "throttle_unauthenticated_api", "throttle_unauthenticated_web" | ||||
| # * throttle_setting_prefix: "throttle_unauthenticated_api", "throttle_unauthenticated" | ||||
| # * url_that_does_not_require_authentication | ||||
| # * url_that_is_not_matched | ||||
| # * requests_per_period | ||||
| # * period_in_seconds | ||||
| # * period | ||||
| RSpec.shared_examples 'rate-limited unauthenticated requests' do | ||||
|   before do | ||||
|     # Set low limits | ||||
|     settings_to_set[:"#{throttle_setting_prefix}_requests_per_period"] = requests_per_period | ||||
|     settings_to_set[:"#{throttle_setting_prefix}_period_in_seconds"] = period_in_seconds | ||||
|   end | ||||
| 
 | ||||
|   context 'when the throttle is enabled' do | ||||
|     before do | ||||
|       settings_to_set[:"#{throttle_setting_prefix}_enabled"] = true | ||||
|       stub_application_setting(settings_to_set) | ||||
|     end | ||||
| 
 | ||||
|     it 'rejects requests over the rate limit' do | ||||
|       # At first, allow requests under the rate limit. | ||||
|       requests_per_period.times do | ||||
|         get url_that_does_not_require_authentication | ||||
|         expect(response).to have_gitlab_http_status(:ok) | ||||
|       end | ||||
| 
 | ||||
|       # the last straw | ||||
|       expect_rejection { get url_that_does_not_require_authentication } | ||||
|     end | ||||
| 
 | ||||
|     context 'with custom response text' do | ||||
|       before do | ||||
|         stub_application_setting(rate_limiting_response_text: 'Custom response') | ||||
|       end | ||||
| 
 | ||||
|       it 'rejects requests over the rate limit' do | ||||
|         # At first, allow requests under the rate limit. | ||||
|         requests_per_period.times do | ||||
|           get url_that_does_not_require_authentication | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
| 
 | ||||
|         # the last straw | ||||
|         expect_rejection { get url_that_does_not_require_authentication } | ||||
|         expect(response.body).to eq("Custom response\n") | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     it 'allows requests after throttling and then waiting for the next period' do | ||||
|       requests_per_period.times do | ||||
|         get url_that_does_not_require_authentication | ||||
|         expect(response).to have_gitlab_http_status(:ok) | ||||
|       end | ||||
| 
 | ||||
|       expect_rejection { get url_that_does_not_require_authentication } | ||||
| 
 | ||||
|       travel_to(period.from_now) do | ||||
|         requests_per_period.times do | ||||
|           get url_that_does_not_require_authentication | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
| 
 | ||||
|         expect_rejection { get url_that_does_not_require_authentication } | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     it 'counts requests from different IPs separately' do | ||||
|       requests_per_period.times do | ||||
|         get url_that_does_not_require_authentication | ||||
|         expect(response).to have_gitlab_http_status(:ok) | ||||
|       end | ||||
| 
 | ||||
|       expect_next_instance_of(Rack::Attack::Request) do |instance| | ||||
|         expect(instance).to receive(:ip).at_least(:once).and_return('1.2.3.4') | ||||
|       end | ||||
| 
 | ||||
|       # would be over limit for the same IP | ||||
|       get url_that_does_not_require_authentication | ||||
|       expect(response).to have_gitlab_http_status(:ok) | ||||
|     end | ||||
| 
 | ||||
|     context 'when the request is not matched by the throttle' do | ||||
|       it 'does not throttle the requests' do | ||||
|         (1 + requests_per_period).times do | ||||
|           get url_that_is_not_matched | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when the request is to the api internal endpoints' do | ||||
|       it 'allows requests over the rate limit' do | ||||
|         (1 + requests_per_period).times do | ||||
|           get '/api/v4/internal/check', params: { secret_token: Gitlab::Shell.secret_token } | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when the request is authenticated by a runner token' do | ||||
|       let(:request_jobs_url) { '/api/v4/jobs/request' } | ||||
|       let(:runner) { create(:ci_runner) } | ||||
| 
 | ||||
|       it 'does not count as unauthenticated' do | ||||
|         (1 + requests_per_period).times do | ||||
|           post request_jobs_url, params: { token: runner.token } | ||||
|           expect(response).to have_gitlab_http_status(:no_content) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when the request is to a health endpoint' do | ||||
|       let(:health_endpoint) { '/-/metrics' } | ||||
| 
 | ||||
|       it 'does not throttle the requests' do | ||||
|         (1 + requests_per_period).times do | ||||
|           get health_endpoint | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when the request is to a container registry notification endpoint' do | ||||
|       let(:secret_token) { 'secret_token' } | ||||
|       let(:events) { [{ action: 'push' }] } | ||||
|       let(:registry_endpoint) { '/api/v4/container_registry_event/events' } | ||||
|       let(:registry_headers) { { 'Content-Type' => ::API::ContainerRegistryEvent::DOCKER_DISTRIBUTION_EVENTS_V1_JSON } } | ||||
| 
 | ||||
|       before do | ||||
|         allow(Gitlab.config.registry).to receive(:notification_secret) { secret_token } | ||||
| 
 | ||||
|         event = spy(:event) | ||||
|         allow(::ContainerRegistry::Event).to receive(:new).and_return(event) | ||||
|         allow(event).to receive(:supported?).and_return(true) | ||||
|       end | ||||
| 
 | ||||
|       it 'does not throttle the requests' do | ||||
|         (1 + requests_per_period).times do | ||||
|           post registry_endpoint, | ||||
|                 params: { events: events }.to_json, | ||||
|                 headers: registry_headers.merge('Authorization' => secret_token) | ||||
| 
 | ||||
|           expect(response).to have_gitlab_http_status(:ok) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     it 'logs RackAttack info into structured logs' do | ||||
|       requests_per_period.times do | ||||
|         get url_that_does_not_require_authentication | ||||
|         expect(response).to have_gitlab_http_status(:ok) | ||||
|       end | ||||
| 
 | ||||
|       arguments = a_hash_including({ | ||||
|         message: 'Rack_Attack', | ||||
|         env: :throttle, | ||||
|         remote_ip: '127.0.0.1', | ||||
|         request_method: 'GET', | ||||
|         path: url_that_does_not_require_authentication, | ||||
|         matched: throttle_name | ||||
|       }) | ||||
| 
 | ||||
|       expect(Gitlab::AuthLogger).to receive(:error).with(arguments) | ||||
| 
 | ||||
|       get url_that_does_not_require_authentication | ||||
|     end | ||||
| 
 | ||||
|     it_behaves_like 'tracking when dry-run mode is set' do | ||||
|       def do_request | ||||
|         get url_that_does_not_require_authentication | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   context 'when the throttle is disabled' do | ||||
|     before do | ||||
|       settings_to_set[:"#{throttle_setting_prefix}_enabled"] = false | ||||
|       stub_application_setting(settings_to_set) | ||||
|     end | ||||
| 
 | ||||
|     it 'allows requests over the rate limit' do | ||||
|       (1 + requests_per_period).times do | ||||
|         get url_that_does_not_require_authentication | ||||
|         expect(response).to have_gitlab_http_status(:ok) | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  |  | |||
|  | @ -383,10 +383,30 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do | |||
|         create(:project, :repository) | ||||
|       end | ||||
| 
 | ||||
|       it 'has defaults' do | ||||
|         expect_next_instance_of(::Backup::Repositories) do |instance| | ||||
|           expect(instance).to receive(:dump) | ||||
|             .with(max_concurrency: 1, max_storage_concurrency: 1) | ||||
|             .and_call_original | ||||
|         end | ||||
| 
 | ||||
|         expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process | ||||
|       end | ||||
| 
 | ||||
|       it 'passes through concurrency environment variables' do | ||||
|         # The way concurrency is handled will change with the `gitaly_backup` | ||||
|         # feature flag. For now we need to check that both ways continue to | ||||
|         # work. This will be cleaned up in the rollout issue. | ||||
|         # See https://gitlab.com/gitlab-org/gitlab/-/issues/333034 | ||||
| 
 | ||||
|         stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 5) | ||||
|         stub_env('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 2) | ||||
| 
 | ||||
|         expect_next_instance_of(::Backup::Repositories) do |instance| | ||||
|           expect(instance).to receive(:dump) | ||||
|             .with(max_concurrency: 5, max_storage_concurrency: 2) | ||||
|             .and_call_original | ||||
|         end | ||||
|         expect(::Backup::GitalyBackup).to receive(:new).with(anything, parallel: 5, parallel_storage: 2).and_call_original | ||||
| 
 | ||||
|         expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout_from_any_process | ||||
|  |  | |||
|  | @ -25,7 +25,7 @@ RSpec.describe 'projects/project_members/index', :aggregate_failures do | |||
| 
 | ||||
|         expect(rendered).to have_content('Project members') | ||||
|         expect(rendered).to have_content('You can invite a new member') | ||||
|         expect(rendered).to have_link('Import a project') | ||||
|         expect(rendered).to have_selector('.js-import-a-project-modal') | ||||
|         expect(rendered).to have_selector('.js-invite-group-trigger') | ||||
|         expect(rendered).to have_selector('.js-invite-members-trigger') | ||||
|         expect(rendered).not_to have_content('Members can be added by project') | ||||
|  | @ -85,7 +85,7 @@ RSpec.describe 'projects/project_members/index', :aggregate_failures do | |||
| 
 | ||||
|         expect(rendered).to have_content('Project members') | ||||
|         expect(rendered).not_to have_content('You can invite a new member') | ||||
|         expect(rendered).not_to have_link('Import a project') | ||||
|         expect(rendered).not_to have_selector('.js-import-a-project-modal') | ||||
|         expect(rendered).not_to have_selector('.js-invite-group-trigger') | ||||
|         expect(rendered).not_to have_selector('.js-invite-members-trigger') | ||||
|         expect(rendered).to have_content('Members can be added by project') | ||||
|  |  | |||
|  | @ -436,6 +436,7 @@ RSpec.describe 'Every Sidekiq worker' do | |||
|         'TodosDestroyer::ConfidentialEpicWorker' => 3, | ||||
|         'TodosDestroyer::ConfidentialIssueWorker' => 3, | ||||
|         'TodosDestroyer::DestroyedIssuableWorker' => 3, | ||||
|         'TodosDestroyer::DestroyedDesignsWorker' => 3, | ||||
|         'TodosDestroyer::EntityLeaveWorker' => 3, | ||||
|         'TodosDestroyer::GroupPrivateWorker' => 3, | ||||
|         'TodosDestroyer::PrivateFeaturesWorker' => 3, | ||||
|  |  | |||
|  | @ -0,0 +1,14 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe TodosDestroyer::DestroyedDesignsWorker do | ||||
|   let(:service) { double } | ||||
| 
 | ||||
|   it 'calls the Todos::Destroy::DesignService with design_ids parameter' do | ||||
|     expect(::Todos::Destroy::DesignService).to receive(:new).with([1, 5]).and_return(service) | ||||
|     expect(service).to receive(:execute) | ||||
| 
 | ||||
|     described_class.new.perform([1, 5]) | ||||
|   end | ||||
| end | ||||
		Loading…
	
		Reference in New Issue