Add latest changes from gitlab-org/gitlab@master

parent 28cff3de2b
commit b17372c8e7
				|  | @ -233,7 +233,6 @@ Rails/Pluck: | |||
|     - 'spec/requests/groups/autocomplete_sources_spec.rb' | ||||
|     - 'spec/requests/groups/milestones_controller_spec.rb' | ||||
|     - 'spec/requests/lfs_http_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_pipeline_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/pipeline_entity_spec.rb' | ||||
|     - 'spec/serializers/diff_file_entity_spec.rb' | ||||
|     - 'spec/serializers/stage_entity_spec.rb' | ||||
|  |  | |||
|  | @ -414,11 +414,6 @@ RSpec/FactoryBot/AvoidCreate: | |||
|     - 'spec/serializers/build_action_entity_spec.rb' | ||||
|     - 'spec/serializers/build_artifact_entity_spec.rb' | ||||
|     - 'spec/serializers/build_details_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_job_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_job_group_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_pipeline_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_pipeline_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_stage_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/downloadable_artifact_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/downloadable_artifact_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/job_entity_spec.rb' | ||||
|  |  | |||
|  | @ -3863,11 +3863,6 @@ RSpec/FeatureCategory: | |||
|     - 'spec/serializers/build_details_entity_spec.rb' | ||||
|     - 'spec/serializers/build_trace_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_job_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_job_group_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_pipeline_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_pipeline_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_stage_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/daily_build_group_report_result_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/downloadable_artifact_entity_spec.rb' | ||||
|  |  | |||
|  | @ -2817,11 +2817,6 @@ RSpec/NamedSubject: | |||
|     - 'spec/serializers/build_details_entity_spec.rb' | ||||
|     - 'spec/serializers/build_trace_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/codequality_mr_diff_report_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_job_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_job_group_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_pipeline_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_pipeline_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_stage_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/downloadable_artifact_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/downloadable_artifact_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/group_variable_entity_spec.rb' | ||||
|  |  | |||
|  | @ -725,10 +725,6 @@ RSpec/VerifiedDoubles: | |||
|     - 'spec/serializers/build_action_entity_spec.rb' | ||||
|     - 'spec/serializers/build_details_entity_spec.rb' | ||||
|     - 'spec/serializers/build_trace_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_job_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_job_group_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_pipeline_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/dag_stage_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/daily_build_group_report_result_entity_spec.rb' | ||||
|     - 'spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb' | ||||
|     - 'spec/serializers/ci/job_entity_spec.rb' | ||||
|  |  | |||
|  | @ -489,9 +489,6 @@ Style/InlineDisableAnnotation: | |||
|     - 'app/serializers/analytics/cycle_analytics/value_stream_entity.rb' | ||||
|     - 'app/serializers/analytics_build_entity.rb' | ||||
|     - 'app/serializers/analytics_issue_entity.rb' | ||||
|     - 'app/serializers/ci/dag_job_entity.rb' | ||||
|     - 'app/serializers/ci/dag_pipeline_entity.rb' | ||||
|     - 'app/serializers/ci/job_entity.rb' | ||||
|     - 'app/serializers/cluster_entity.rb' | ||||
|     - 'app/serializers/diffs_metadata_entity.rb' | ||||
|     - 'app/serializers/environment_serializer.rb' | ||||
|  |  | |||
|  | @ -1,2 +0,0 @@ | |||
| // /dag is an alias for show | ||||
| import '../show/index'; | ||||
|  | @ -24,11 +24,6 @@ export default { | |||
|       type: Object, | ||||
|       required: true, | ||||
|     }, | ||||
|     fadeDoneTodo: { | ||||
|       type: Boolean, | ||||
|       required: false, | ||||
|       default: false, | ||||
|     }, | ||||
|   }, | ||||
|   computed: { | ||||
|     isDone() { | ||||
|  | @ -40,9 +35,6 @@ export default { | |||
|     targetUrl() { | ||||
|       return this.todo.targetUrl; | ||||
|     }, | ||||
|     fadeTodo() { | ||||
|       return this.fadeDoneTodo && this.isDone; | ||||
|     }, | ||||
|     trackingLabel() { | ||||
|       return this.todo.targetType ?? 'UNKNOWN'; | ||||
|     }, | ||||
|  | @ -53,7 +45,6 @@ export default { | |||
| <template> | ||||
|   <li | ||||
|     class="gl-border-t gl-border-b gl-relative -gl-mt-px gl-block gl-px-5 gl-py-3 hover:gl-z-1 hover:gl-cursor-pointer hover:gl-border-blue-200 hover:gl-bg-blue-50" | ||||
|     :class="{ 'gl-border-gray-50 gl-bg-gray-10': fadeTodo }" | ||||
|   > | ||||
|     <gl-link | ||||
|       :href="targetUrl" | ||||
|  | @ -63,16 +54,18 @@ export default { | |||
|     > | ||||
|       <div | ||||
|         class="gl-w-64 gl-flex-grow-2 gl-self-center gl-overflow-hidden gl-overflow-x-auto sm:gl-w-auto" | ||||
|         :class="{ 'gl-opacity-5': fadeTodo }" | ||||
|       > | ||||
|         <todo-item-title :todo="todo" /> | ||||
|         <todo-item-body :todo="todo" :current-user-id="currentUserId" /> | ||||
|       </div> | ||||
|       <todo-item-actions :todo="todo" class="sm:gl-order-3" /> | ||||
|       <todo-item-actions | ||||
|         :todo="todo" | ||||
|         class="sm:gl-order-3" | ||||
|         @change="(id, markedAsDone) => $emit('change', id, markedAsDone)" | ||||
|       /> | ||||
|       <todo-item-timestamp | ||||
|         :todo="todo" | ||||
|         class="gl-w-full gl-whitespace-nowrap gl-px-2 sm:gl-w-auto" | ||||
|         :class="{ 'gl-opacity-5': fadeTodo }" | ||||
|       /> | ||||
|     </gl-link> | ||||
|   </li> | ||||
|  |  | |||
|  | @ -3,7 +3,12 @@ import { GlButton, GlTooltipDirective } from '@gitlab/ui'; | |||
| import { reportToSentry } from '~/ci/utils'; | ||||
| import { s__ } from '~/locale'; | ||||
| import Tracking from '~/tracking'; | ||||
| import { INSTRUMENT_TODO_ITEM_CLICK, TODO_STATE_DONE, TODO_STATE_PENDING } from '../constants'; | ||||
| import { | ||||
|   INSTRUMENT_TODO_ITEM_CLICK, | ||||
|   TAB_ALL, | ||||
|   TODO_STATE_DONE, | ||||
|   TODO_STATE_PENDING, | ||||
| } from '../constants'; | ||||
| import markAsDoneMutation from './mutations/mark_as_done.mutation.graphql'; | ||||
| import markAsPendingMutation from './mutations/mark_as_pending.mutation.graphql'; | ||||
| 
 | ||||
|  | @ -15,6 +20,7 @@ export default { | |||
|     GlTooltip: GlTooltipDirective, | ||||
|   }, | ||||
|   mixins: [Tracking.mixin()], | ||||
|   inject: ['currentTab'], | ||||
|   props: { | ||||
|     todo: { | ||||
|       type: Object, | ||||
|  | @ -33,6 +39,14 @@ export default { | |||
|     isPending() { | ||||
|       return this.todo.state === TODO_STATE_PENDING; | ||||
|     }, | ||||
|     tooltipTitle() { | ||||
|       // Setting this to null while loading, combined with keeping the | ||||
|       // loading state till the item gets removed, prevents the tooltip | ||||
|       // text changing with the item state before the item gets removed. | ||||
|       if (this.isLoading) return null; | ||||
| 
 | ||||
|       return this.isDone ? this.$options.i18n.markAsPending : this.$options.i18n.markAsDone; | ||||
|     }, | ||||
|   }, | ||||
|   methods: { | ||||
|     showMarkAsDoneError() { | ||||
|  | @ -75,12 +89,22 @@ export default { | |||
|         if (data.errors?.length > 0) { | ||||
|           reportToSentry(this.$options.name, new Error(data.errors.join(', '))); | ||||
|           showError(); | ||||
|         } else { | ||||
|           this.$emit('change', this.todo.id, this.isDone); | ||||
|         } | ||||
|       } catch (failure) { | ||||
|         reportToSentry(this.$options.name, failure); | ||||
|         showError(); | ||||
|       } finally { | ||||
|         this.isLoading = false; | ||||
|       } finally { | ||||
|         // Only stop loading spinner when on "All" tab. | ||||
|         // On the other tabs (Pending/Done) we want the loading to continue | ||||
|         // until the todos query finished, removing this item from the list. | ||||
|         // This way we hide the state change, which would otherwise update | ||||
|         // the button's icon before it gets removed. | ||||
|         if (this.currentTab === TAB_ALL) { | ||||
|           this.isLoading = false; | ||||
|         } | ||||
|       } | ||||
|     }, | ||||
|   }, | ||||
|  | @ -97,7 +121,7 @@ export default { | |||
|     :icon="isDone ? 'redo' : 'check'" | ||||
|     :loading="isLoading" | ||||
|     :aria-label="isDone ? $options.i18n.markAsPending : $options.i18n.markAsDone" | ||||
|     :title="isDone ? $options.i18n.markAsPending : $options.i18n.markAsDone" | ||||
|     :title="tooltipTitle" | ||||
|     @click.prevent="toggleStatus" | ||||
|   /> | ||||
| </template> | ||||
|  |  | |||
|  | @ -1,4 +1,5 @@ | |||
| <script> | ||||
| import { computed } from 'vue'; | ||||
| import { GlLoadingIcon, GlKeysetPagination, GlLink, GlBadge, GlTab, GlTabs } from '@gitlab/ui'; | ||||
| import * as Sentry from '~/sentry/sentry_browser_wrapper'; | ||||
| import { createAlert } from '~/alert'; | ||||
|  | @ -11,6 +12,8 @@ import { | |||
| } from '~/todos/constants'; | ||||
| import getTodosQuery from './queries/get_todos.query.graphql'; | ||||
| import getPendingTodosCount from './queries/get_pending_todos_count.query.graphql'; | ||||
| import markAsDoneMutation from './mutations/mark_as_done.mutation.graphql'; | ||||
| import markAsPendingMutation from './mutations/mark_as_pending.mutation.graphql'; | ||||
| import TodoItem from './todo_item.vue'; | ||||
| import TodosEmptyState from './todos_empty_state.vue'; | ||||
| import TodosFilterBar, { SORT_OPTIONS } from './todos_filter_bar.vue'; | ||||
|  | @ -32,6 +35,11 @@ export default { | |||
|     TodosMarkAllDoneButton, | ||||
|   }, | ||||
|   mixins: [Tracking.mixin()], | ||||
|   provide() { | ||||
|     return { | ||||
|       currentTab: computed(() => this.currentTab), | ||||
|     }; | ||||
|   }, | ||||
|   data() { | ||||
|     return { | ||||
|       cursor: { | ||||
|  | @ -54,11 +62,13 @@ export default { | |||
|         sort: `${SORT_OPTIONS[0].value}_DESC`, | ||||
|       }, | ||||
|       alert: null, | ||||
|       showSpinnerWhileLoading: true, | ||||
|     }; | ||||
|   }, | ||||
|   apollo: { | ||||
|     todos: { | ||||
|       query: getTodosQuery, | ||||
|       fetchPolicy: 'cache-and-network', | ||||
|       variables() { | ||||
|         return { | ||||
|           state: this.statusByTab, | ||||
|  | @ -107,9 +117,6 @@ export default { | |||
|     showMarkAllAsDone() { | ||||
|       return this.currentTab === 0 && !this.showEmptyState; | ||||
|     }, | ||||
|     fadeDoneTodo() { | ||||
|       return this.currentTab === 0; | ||||
|     }, | ||||
|   }, | ||||
|   methods: { | ||||
|     nextPage(item) { | ||||
|  | @ -144,9 +151,34 @@ export default { | |||
|       this.alert?.dismiss(); | ||||
|       this.queryFilterValues = { ...data }; | ||||
|     }, | ||||
|     async handleItemChanged(id, markedAsDone) { | ||||
|       await this.updateAllQueries(false); | ||||
|       this.showUndoToast(id, markedAsDone); | ||||
|     }, | ||||
|     showUndoToast(todoId, markedAsDone) { | ||||
|       const message = markedAsDone ? s__('Todos|Marked as done') : s__('Todos|Marked as undone'); | ||||
|       const mutation = markedAsDone ? markAsPendingMutation : markAsDoneMutation; | ||||
| 
 | ||||
|       const { hide } = this.$toast.show(message, { | ||||
|         action: { | ||||
|           text: s__('Todos|Undo'), | ||||
|           onClick: async () => { | ||||
|             hide(); | ||||
|             await this.$apollo.mutate({ mutation, variables: { todoId } }); | ||||
|             this.updateAllQueries(false); | ||||
|           }, | ||||
|         }, | ||||
|       }); | ||||
|     }, | ||||
|     updateCounts() { | ||||
|       this.$apollo.queries.pendingTodosCount.refetch(); | ||||
|     }, | ||||
|     async updateAllQueries(showLoading = true) { | ||||
|       this.showSpinnerWhileLoading = showLoading; | ||||
|       this.updateCounts(); | ||||
|       await this.$apollo.queries.todos.refetch(); | ||||
|       this.showSpinnerWhileLoading = true; | ||||
|     }, | ||||
|   }, | ||||
| }; | ||||
| </script> | ||||
|  | @ -184,15 +216,17 @@ export default { | |||
| 
 | ||||
|     <div> | ||||
|       <div class="gl-flex gl-flex-col"> | ||||
|         <gl-loading-icon v-if="isLoading" size="lg" class="gl-mt-5" /> | ||||
|         <gl-loading-icon v-if="isLoading && showSpinnerWhileLoading" size="lg" class="gl-mt-5" /> | ||||
|         <ul v-else class="gl-m-0 gl-border-collapse gl-list-none gl-p-0"> | ||||
|           <todo-item | ||||
|             v-for="todo in todos" | ||||
|             :key="todo.id" | ||||
|             :todo="todo" | ||||
|             :current-user-id="currentUserId" | ||||
|             :fade-done-todo="fadeDoneTodo" | ||||
|           /> | ||||
|           <transition-group name="todos"> | ||||
|             <todo-item | ||||
|               v-for="todo in todos" | ||||
|               :key="todo.id" | ||||
|               :todo="todo" | ||||
|               :current-user-id="currentUserId" | ||||
|               @change="handleItemChanged" | ||||
|             /> | ||||
|           </transition-group> | ||||
|         </ul> | ||||
| 
 | ||||
|         <todos-empty-state v-if="showEmptyState" :is-filtered="isFiltered" /> | ||||
|  | @ -214,3 +248,17 @@ export default { | |||
|     </div> | ||||
|   </div> | ||||
| </template> | ||||
| 
 | ||||
| <style> | ||||
| .todos-leave-active { | ||||
|   transition: transform 0.15s ease-out; | ||||
|   position: absolute; | ||||
| } | ||||
| .todos-leave-to { | ||||
|   opacity: 0; | ||||
|   transform: translateY(-100px); | ||||
| } | ||||
| .todos-move { | ||||
|   transition: transform 0.15s ease-out; | ||||
| } | ||||
| </style> | ||||
|  |  | |||
|  | @ -36,6 +36,7 @@ export const TODO_EMPTY_TITLE_POOL = [ | |||
| ]; | ||||
| 
 | ||||
| export const STATUS_BY_TAB = [['pending'], ['done'], ['pending', 'done']]; | ||||
| export const TAB_ALL = 2; | ||||
| 
 | ||||
| /** | ||||
|  * Instrumentation | ||||
|  |  | |||
|  | @ -9,7 +9,7 @@ class Dashboard::TodosController < Dashboard::ApplicationController | |||
|   before_action :authorize_read_group!, only: :index | ||||
|   before_action :find_todos, only: [:index, :destroy_all] | ||||
| 
 | ||||
|   feature_category :team_planning | ||||
|   feature_category :notifications | ||||
|   urgency :low | ||||
| 
 | ||||
|   def index | ||||
|  |  | |||
|  | @ -7,7 +7,7 @@ class Projects::PipelinesController < Projects::ApplicationController | |||
| 
 | ||||
|   urgency :low, [ | ||||
|     :index, :new, :builds, :show, :failures, :create, | ||||
|     :stage, :retry, :dag, :cancel, :test_report, | ||||
|     :stage, :retry, :cancel, :test_report, | ||||
|     :charts, :destroy, :status, :manual_variables | ||||
|   ] | ||||
| 
 | ||||
|  | @ -27,7 +27,7 @@ class Projects::PipelinesController < Projects::ApplicationController | |||
|   before_action :authorize_cancel_pipeline!, only: [:cancel] | ||||
|   before_action :ensure_pipeline, only: [:show, :downloadable_artifacts] | ||||
|   before_action :reject_if_build_artifacts_size_refreshing!, only: [:destroy] | ||||
|   before_action only: [:show, :dag, :builds, :failures, :test_report, :manual_variables] do | ||||
|   before_action only: [:show, :builds, :failures, :test_report, :manual_variables] do | ||||
|     push_frontend_feature_flag(:ci_show_manual_variables_in_pipeline, project) | ||||
|   end | ||||
| 
 | ||||
|  | @ -54,7 +54,7 @@ class Projects::PipelinesController < Projects::ApplicationController | |||
| 
 | ||||
|   feature_category :continuous_integration, [ | ||||
|     :charts, :show, :stage, :cancel, :retry, | ||||
|     :builds, :dag, :failures, :status, | ||||
|     :builds, :failures, :status, | ||||
|     :index, :new, :destroy, :manual_variables | ||||
|   ] | ||||
|   feature_category :pipeline_composition, [:create] | ||||
|  | @ -144,19 +144,6 @@ class Projects::PipelinesController < Projects::ApplicationController | |||
|     render_show | ||||
|   end | ||||
| 
 | ||||
|   def dag | ||||
|     respond_to do |format| | ||||
|       format.html do | ||||
|         render_show | ||||
|       end | ||||
|       format.json do | ||||
|         render json: Ci::DagPipelineSerializer | ||||
|           .new(project: @project, current_user: @current_user) | ||||
|           .represent(@pipeline) | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   def failures | ||||
|     if @pipeline.failed_builds.present? | ||||
|       render_show | ||||
|  |  | |||
|  | @ -6,7 +6,7 @@ class Projects::TodosController < Projects::ApplicationController | |||
| 
 | ||||
|   before_action :authenticate_user!, only: [:create] | ||||
| 
 | ||||
|   feature_category :team_planning | ||||
|   feature_category :notifications | ||||
|   urgency :low | ||||
| 
 | ||||
|   private | ||||
|  |  | |||
|  | @ -35,30 +35,21 @@ module Resolvers | |||
|         return unless runner.project_type? | ||||
| 
 | ||||
|         BatchLoader::GraphQL.for(runner.id).batch do |runner_ids, loader| | ||||
|           # rubocop: disable CodeReuse/ActiveRecord | ||||
|           runner_and_projects_with_row_number = | ||||
|             ::Ci::RunnerProject | ||||
|               .where(runner_id: runner_ids) | ||||
|               .select('id, runner_id, project_id, ROW_NUMBER() OVER (PARTITION BY runner_id ORDER BY id ASC)') | ||||
|           runner_and_owner_projects = | ||||
|             ::Ci::RunnerProject | ||||
|               .select(:id, :runner_id, :project_id) | ||||
|               .from("(#{runner_and_projects_with_row_number.to_sql}) temp WHERE row_number = 1") | ||||
|           owner_project_id_by_runner_id = | ||||
|             runner_and_owner_projects | ||||
|               .group_by(&:runner_id) | ||||
|               .transform_values { |runner_projects| runner_projects.first.project_id } | ||||
|           project_ids = owner_project_id_by_runner_id.values.uniq | ||||
|           # rubocop: disable CodeReuse/ActiveRecord -- this runs on a limited number of records | ||||
|           runner_id_to_owner_id = | ||||
|             ::Ci::Runner.project_type.id_in(runner_ids) | ||||
|               .pluck(:id, :sharding_key_id) | ||||
|               .to_h | ||||
|           # rubocop: enable CodeReuse/ActiveRecord | ||||
| 
 | ||||
|           projects = apply_lookahead(Project.id_in(project_ids)) | ||||
|           projects = apply_lookahead(Project.id_in(runner_id_to_owner_id.values)) | ||||
|           Preloaders::ProjectPolicyPreloader.new(projects, current_user).execute | ||||
|           projects_by_id = projects.index_by(&:id) | ||||
| 
 | ||||
|           runner_ids.each do |runner_id| | ||||
|             owner_project_id = owner_project_id_by_runner_id[runner_id] | ||||
|             owner_project_id = runner_id_to_owner_id[runner_id] | ||||
|             loader.call(runner_id, projects_by_id[owner_project_id]) | ||||
|           end | ||||
|           # rubocop: enable CodeReuse/ActiveRecord | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|  |  | |||
|  | @ -48,15 +48,10 @@ module Ci | |||
|           request | ||||
|         end | ||||
| 
 | ||||
|         def pipeline_creating_for_merge_request?(merge_request, delete_if_all_complete: false) | ||||
|         def pipeline_creating_for_merge_request?(merge_request) | ||||
|           key = merge_request_key(merge_request) | ||||
| 
 | ||||
|           requests, _del_result = Gitlab::Redis::SharedState.with do |redis| | ||||
|             redis.multi do |transaction| | ||||
|               transaction.hvals(key) | ||||
|               transaction.del(key) if delete_if_all_complete | ||||
|             end | ||||
|           end | ||||
|           requests = Gitlab::Redis::SharedState.with { |redis| redis.hvals(key) } | ||||
| 
 | ||||
|           return false unless requests.present? | ||||
| 
 | ||||
|  |  | |||
|  | @ -331,19 +331,18 @@ module Ci | |||
|     end | ||||
| 
 | ||||
|     def runner_matcher | ||||
|       strong_memoize(:runner_matcher) do | ||||
|         Gitlab::Ci::Matching::RunnerMatcher.new({ | ||||
|           runner_ids: [id], | ||||
|           runner_type: runner_type, | ||||
|           public_projects_minutes_cost_factor: public_projects_minutes_cost_factor, | ||||
|           private_projects_minutes_cost_factor: private_projects_minutes_cost_factor, | ||||
|           run_untagged: run_untagged, | ||||
|           access_level: access_level, | ||||
|           tag_list: tag_list, | ||||
|           allowed_plan_ids: allowed_plan_ids | ||||
|         }) | ||||
|       end | ||||
|       Gitlab::Ci::Matching::RunnerMatcher.new({ | ||||
|         runner_ids: [id], | ||||
|         runner_type: runner_type, | ||||
|         public_projects_minutes_cost_factor: public_projects_minutes_cost_factor, | ||||
|         private_projects_minutes_cost_factor: private_projects_minutes_cost_factor, | ||||
|         run_untagged: run_untagged, | ||||
|         access_level: access_level, | ||||
|         tag_list: tag_list, | ||||
|         allowed_plan_ids: allowed_plan_ids | ||||
|       }) | ||||
|     end | ||||
|     strong_memoize_attr :runner_matcher | ||||
| 
 | ||||
|     def assign_to(project, current_user = nil) | ||||
|       if instance_type? | ||||
|  | @ -354,7 +353,11 @@ module Ci | |||
| 
 | ||||
|       begin | ||||
|         transaction do | ||||
|           self.sharding_key_id = project.id if self.runner_projects.empty? | ||||
|           if self.runner_projects.empty? | ||||
|             self.sharding_key_id = project.id | ||||
|             self.clear_memoization(:owner) | ||||
|           end | ||||
| 
 | ||||
|           self.runner_projects << ::Ci::RunnerProject.new(project: project, runner: self) | ||||
|           self.save! | ||||
|         end | ||||
|  | @ -384,14 +387,20 @@ module Ci | |||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def owner_project | ||||
|       return unless project_type? | ||||
| 
 | ||||
|       runner_projects.order(:id).first&.project | ||||
|     def owner | ||||
|       case runner_type | ||||
|       when 'instance_type' | ||||
|         ::User.find_by_id(creator_id) | ||||
|       when 'group_type' | ||||
|         ::Group.find_by_id(sharding_key_id) | ||||
|       when 'project_type' | ||||
|         ::Project.find_by_id(sharding_key_id) | ||||
|       end | ||||
|     end | ||||
|     strong_memoize_attr :owner | ||||
| 
 | ||||
|     def belongs_to_one_project? | ||||
|       runner_projects.count == 1 | ||||
|       runner_projects.limit(2).count(:all) == 1 | ||||
|     end | ||||
| 
 | ||||
|     def belongs_to_more_than_one_project? | ||||
|  | @ -497,10 +506,9 @@ module Ci | |||
|     end | ||||
| 
 | ||||
|     def namespace_ids | ||||
|       strong_memoize(:namespace_ids) do | ||||
|         runner_namespaces.pluck(:namespace_id).compact | ||||
|       end | ||||
|       runner_namespaces.pluck(:namespace_id).compact | ||||
|     end | ||||
|     strong_memoize_attr :namespace_ids | ||||
| 
 | ||||
|     def compute_token_expiration | ||||
|       case runner_type | ||||
|  |  | |||
|  | @ -2440,7 +2440,7 @@ class MergeRequest < ApplicationRecord | |||
|   def pipeline_creating? | ||||
|     return false unless Feature.enabled?(:ci_redis_pipeline_creations, project) | ||||
| 
 | ||||
|     Ci::PipelineCreation::Requests.pipeline_creating_for_merge_request?(self, delete_if_all_complete: true) | ||||
|     Ci::PipelineCreation::Requests.pipeline_creating_for_merge_request?(self) | ||||
|   end | ||||
| 
 | ||||
|   def merge_base_pipelines | ||||
|  |  | |||
|  | @ -1,12 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| module Ci | ||||
|   class DagJobEntity < Grape::Entity | ||||
|     expose :name | ||||
|     expose :scheduling_type | ||||
| 
 | ||||
|     expose :needs, if: ->(job, _) { job.scheduling_type_dag? } do |job| | ||||
|       job.needs.pluck(:name) # rubocop: disable CodeReuse/ActiveRecord | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -1,9 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| module Ci | ||||
|   class DagJobGroupEntity < Grape::Entity | ||||
|     expose :name | ||||
|     expose :size | ||||
|     expose :jobs, with: Ci::DagJobEntity | ||||
|   end | ||||
| end | ||||
|  | @ -1,20 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| module Ci | ||||
|   class DagPipelineEntity < Grape::Entity | ||||
|     expose :stages_with_preloads, as: :stages, using: Ci::DagStageEntity | ||||
| 
 | ||||
|     private | ||||
| 
 | ||||
|     def stages_with_preloads | ||||
|       object.stages.preload(preloaded_relations) # rubocop: disable CodeReuse/ActiveRecord | ||||
|     end | ||||
| 
 | ||||
|     def preloaded_relations | ||||
|       [ | ||||
|         :project, | ||||
|         { latest_statuses: :needs } | ||||
|       ] | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -1,7 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| module Ci | ||||
|   class DagPipelineSerializer < BaseSerializer | ||||
|     entity Ci::DagPipelineEntity | ||||
|   end | ||||
| end | ||||
|  | @ -1,9 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| module Ci | ||||
|   class DagStageEntity < Grape::Entity | ||||
|     expose :name | ||||
| 
 | ||||
|     expose :groups, with: Ci::DagJobGroupEntity | ||||
|   end | ||||
| end | ||||
|  | @ -73,7 +73,9 @@ module Ci | |||
|     end | ||||
| 
 | ||||
|     def path_to(route, job, params = {}) | ||||
|       send("#{route}_path", job.project.namespace, job.project, job, params) # rubocop:disable GitlabSecurity/PublicSend | ||||
|       # rubocop:disable GitlabSecurity/PublicSend -- needs send | ||||
|       send("#{route}_path", job.project.namespace, job.project, job, params) | ||||
|       # rubocop:enable GitlabSecurity/PublicSend | ||||
|     end | ||||
| 
 | ||||
|     def job_path(job) | ||||
|  |  | |||
|  | @ -45,7 +45,7 @@ module Ci | |||
|             reason: :not_authorized_to_add_runner_in_project) | ||||
|         end | ||||
| 
 | ||||
|         if runner.owner_project && project.organization_id != runner.owner_project.organization_id | ||||
|         if runner.owner && project.organization_id != runner.owner.organization_id | ||||
|           return ServiceResponse.error(message: _('runner can only be assigned to projects in the same organization'), | ||||
|             reason: :project_not_in_same_organization) | ||||
|         end | ||||
|  |  | |||
|  | @ -26,13 +26,11 @@ module Ci | |||
|       private | ||||
| 
 | ||||
|       def set_associated_projects | ||||
|         new_project_ids = [runner.owner_project&.id].compact + project_ids | ||||
|         new_project_ids = [runner.owner&.id].compact + project_ids | ||||
| 
 | ||||
|         response = ServiceResponse.success | ||||
|         runner.transaction do | ||||
|           # rubocop:disable CodeReuse/ActiveRecord | ||||
|           current_project_ids = runner.projects.ids | ||||
|           # rubocop:enable CodeReuse/ActiveRecord | ||||
|           current_project_ids = runner.project_ids # rubocop:disable CodeReuse/ActiveRecord -- reasonable use | ||||
| 
 | ||||
|           response = associate_new_projects(new_project_ids, current_project_ids) | ||||
|           response = disassociate_old_projects(new_project_ids, current_project_ids) if response.success? | ||||
|  |  | |||
|  | @ -33,9 +33,9 @@ module Ci | |||
|         kwargs = { user: current_user } | ||||
|         case runner.runner_type | ||||
|         when 'group_type' | ||||
|           kwargs[:namespace] = runner.groups.first | ||||
|           kwargs[:namespace] = runner.owner | ||||
|         when 'project_type' | ||||
|           kwargs[:project] = runner.owner_project | ||||
|           kwargs[:project] = runner.owner | ||||
|         end | ||||
| 
 | ||||
|         track_internal_event( | ||||
|  |  | |||
|  | @ -26,6 +26,8 @@ module MergeRequests | |||
|         invalidate_cache_counts(merge_request, users: merge_request.assignees) | ||||
|       end | ||||
| 
 | ||||
|       invalidate_cache_counts(merge_request, users: old_assignees) | ||||
| 
 | ||||
|       execute_assignees_hooks(merge_request, old_assignees) if options['execute_hooks'] | ||||
|     end | ||||
| 
 | ||||
|  |  | |||
|  | @ -2318,7 +2318,7 @@ | |||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_confidential_issue | ||||
|   :worker_name: TodosDestroyer::ConfidentialIssueWorker | ||||
|   :feature_category: :team_planning | ||||
|   :feature_category: :notifications | ||||
|   :has_external_dependencies: false | ||||
|   :urgency: :low | ||||
|   :resource_boundary: :unknown | ||||
|  | @ -2327,7 +2327,7 @@ | |||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_destroyed_designs | ||||
|   :worker_name: TodosDestroyer::DestroyedDesignsWorker | ||||
|   :feature_category: :team_planning | ||||
|   :feature_category: :notifications | ||||
|   :has_external_dependencies: false | ||||
|   :urgency: :low | ||||
|   :resource_boundary: :unknown | ||||
|  | @ -2336,7 +2336,7 @@ | |||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_destroyed_issuable | ||||
|   :worker_name: TodosDestroyer::DestroyedIssuableWorker | ||||
|   :feature_category: :team_planning | ||||
|   :feature_category: :notifications | ||||
|   :has_external_dependencies: false | ||||
|   :urgency: :low | ||||
|   :resource_boundary: :unknown | ||||
|  | @ -2345,7 +2345,7 @@ | |||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_entity_leave | ||||
|   :worker_name: TodosDestroyer::EntityLeaveWorker | ||||
|   :feature_category: :team_planning | ||||
|   :feature_category: :notifications | ||||
|   :has_external_dependencies: false | ||||
|   :urgency: :low | ||||
|   :resource_boundary: :unknown | ||||
|  | @ -2354,7 +2354,7 @@ | |||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_group_private | ||||
|   :worker_name: TodosDestroyer::GroupPrivateWorker | ||||
|   :feature_category: :team_planning | ||||
|   :feature_category: :notifications | ||||
|   :has_external_dependencies: false | ||||
|   :urgency: :low | ||||
|   :resource_boundary: :unknown | ||||
|  | @ -2363,7 +2363,7 @@ | |||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_private_features | ||||
|   :worker_name: TodosDestroyer::PrivateFeaturesWorker | ||||
|   :feature_category: :team_planning | ||||
|   :feature_category: :notifications | ||||
|   :has_external_dependencies: false | ||||
|   :urgency: :low | ||||
|   :resource_boundary: :unknown | ||||
|  | @ -2372,7 +2372,7 @@ | |||
|   :tags: [] | ||||
| - :name: todos_destroyer:todos_destroyer_project_private | ||||
|   :worker_name: TodosDestroyer::ProjectPrivateWorker | ||||
|   :feature_category: :team_planning | ||||
|   :feature_category: :notifications | ||||
|   :has_external_dependencies: false | ||||
|   :urgency: :low | ||||
|   :resource_boundary: :unknown | ||||
|  |  | |||
|  | @ -8,6 +8,6 @@ module TodosDestroyerQueue | |||
| 
 | ||||
|   included do | ||||
|     queue_namespace :todos_destroyer | ||||
|     feature_category :team_planning | ||||
|     feature_category :notifications | ||||
|   end | ||||
| end | ||||
|  |  | |||
|  | @ -6,7 +6,7 @@ column: default_project_creation | |||
| db_type: integer | ||||
| default: '2' | ||||
| description: 'Default project creation protection. Can take: `0` _(No one)_, `1` _(Maintainers)_, | ||||
|   `2` _(Developers + Maintainers)_ or `3` _(Administrators)_' | ||||
|   `2` _(Developers + Maintainers)_, or `3` _(Administrators)_.' | ||||
| encrypted: false | ||||
| gitlab_com_different_than_default: false | ||||
| jihu: false | ||||
|  |  | |||
|  | @ -5,7 +5,8 @@ clusterwide: false | |||
| column: elasticsearch_retry_on_failure | ||||
| db_type: integer | ||||
| default: '0' | ||||
| description: Maximum number of possible retries for Elasticsearch search requests. Premium and Ultimate only. | ||||
| description: Maximum number of possible retries for Elasticsearch search requests. | ||||
|   Premium and Ultimate only. | ||||
| encrypted: false | ||||
| gitlab_com_different_than_default: false | ||||
| jihu: false | ||||
|  |  | |||
|  | @ -5,7 +5,7 @@ clusterwide: true | |||
| column: identity_verification_settings | ||||
| db_type: jsonb | ||||
| default: "'{}'::jsonb" | ||||
| description: 'Configuration settings related to identity verification' | ||||
| description: | ||||
| encrypted: false | ||||
| gitlab_com_different_than_default: true | ||||
| jihu: false | ||||
|  |  | |||
|  | @ -8,7 +8,9 @@ default: | |||
| description: Maximum allowable lifetime for access tokens in days. When left blank, | ||||
|   default value of 365 is applied. When set, value must be 365 or less. When changed, | ||||
|   existing access tokens with an expiration date beyond the maximum allowable lifetime | ||||
|   are revoked. Self-managed, Ultimate only. | ||||
|   are revoked. Self-managed, Ultimate only. In GitLab 17.6 or later, the maximum lifetime | ||||
|   limit can be [extended to 400 days](https://gitlab.com/gitlab-org/gitlab/-/issues/461901) | ||||
|   by enabling a [feature flag](../administration/feature_flags.md) named `buffered_token_expiration_limit`. | ||||
| encrypted: false | ||||
| gitlab_com_different_than_default: false | ||||
| jihu: false | ||||
|  |  | |||
|  | @ -6,7 +6,9 @@ column: max_ssh_key_lifetime | |||
| db_type: integer | ||||
| default: | ||||
| description: Maximum allowable lifetime for SSH keys in days. Self-managed, Ultimate | ||||
|   only. | ||||
|   only. In GitLab 17.6 or later, the maximum lifetime limit can be [extended to 400 | ||||
|   days](https://gitlab.com/gitlab-org/gitlab/-/issues/461901) by enabling a [feature | ||||
|   flag](../administration/feature_flags.md) named `buffered_token_expiration_limit`. | ||||
| encrypted: false | ||||
| gitlab_com_different_than_default: false | ||||
| jihu: false | ||||
|  |  | |||
|  | @ -5,7 +5,7 @@ clusterwide: false | |||
| column: sign_in_restrictions | ||||
| db_type: jsonb | ||||
| default: "'{}'::jsonb" | ||||
| description: "Settings related to sign-in restrictions" | ||||
| description: | ||||
| encrypted: false | ||||
| gitlab_com_different_than_default: true | ||||
| jihu: false | ||||
|  |  | |||
|  | @ -5,10 +5,7 @@ clusterwide: false | |||
| column: transactional_emails | ||||
| db_type: jsonb | ||||
| default: "'{}'::jsonb" | ||||
| description: > | ||||
|   Settings for transactional emails. | ||||
|   [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/168245) in | ||||
|   GitLab 17.6 to support options for group and project access token expiry. | ||||
| description: | ||||
| encrypted: false | ||||
| gitlab_com_different_than_default: false | ||||
| jihu: false | ||||
|  |  | |||
|  | @ -0,0 +1,9 @@ | |||
| --- | ||||
| name: expand_nested_variables_in_job_rules_exists_and_changes | ||||
| feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/327780 | ||||
| introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/166779 | ||||
| rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/483115 | ||||
| milestone: '17.6' | ||||
| group: group::pipeline authoring | ||||
| type: gitlab_com_derisk | ||||
| default_enabled: false | ||||
|  | @ -14,7 +14,6 @@ resources :pipelines, only: [:index, :new, :create, :show, :destroy] do | |||
|     post :cancel | ||||
|     post :retry | ||||
|     get :builds | ||||
|     get :dag | ||||
|     get :failures | ||||
|     get :status | ||||
|     get :test_report | ||||
|  |  | |||
|  | @ -10,3 +10,8 @@ | |||
|     [GitLab Runner documentation](https://docs.gitlab.com/runner/) | ||||
|   stage: verify | ||||
|   issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/387937 | ||||
|   window: "1" | ||||
|   impact: low | ||||
|   scope: [instance, group, project] | ||||
|   resolution_role: admin | ||||
|   manual_task: false | ||||
|  |  | |||
|  | @ -1,13 +0,0 @@ | |||
| - title: "List container registry repository tags API endpoint pagination" | ||||
|   announcement_milestone: "16.10" | ||||
|   removal_milestone: "18.0" | ||||
|   breaking_change: true | ||||
|   reporter: trizzi | ||||
|   stage: Package | ||||
|   issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/432470 | ||||
|   body: | | ||||
|     You can use the container registry REST API to [get a list of registry repository tags](https://docs.gitlab.com/ee/api/container_registry.html#list-registry-repository-tags). We plan to improve this endpoint, adding more metadata and new features like improved sorting and filtering. | ||||
| 
 | ||||
|     While offset-based pagination was already available for this endpoint, keyset-based pagination was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/432470) in GitLab 16.10 for GitLab.com only. This is now the preferred pagination method. | ||||
| 
 | ||||
|     Offset-based pagination for the [List registry repository tags](https://docs.gitlab.com/ee/api/container_registry.html#list-registry-repository-tags) endpoint is deprecated in GitLab 16.10 and will be removed in 18.0. Instead, use the keyset-based pagination. | ||||
|  | @ -3,7 +3,7 @@ table_name: todos | |||
| classes: | ||||
| - Todo | ||||
| feature_categories: | ||||
| - team_planning | ||||
| - notifications | ||||
| description: >- | ||||
|   An action required or notification of action taken for a user on a target object, generated by various actions within the | ||||
|   GitLab application | ||||
|  |  | |||
|  | @ -0,0 +1,10 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| class AddMetadataToZoektIndices < Gitlab::Database::Migration[2.2] | ||||
|   enable_lock_retries! | ||||
|   milestone '17.6' | ||||
| 
 | ||||
|   def change | ||||
|     add_column :zoekt_indices, :metadata, :jsonb, default: {}, null: false | ||||
|   end | ||||
| end | ||||
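For local verification, a migration like this is typically applied with the standard Rails task (a sketch; it assumes a working GitLab development environment):

```shell
# Runs pending migrations, including AddMetadataToZoektIndices, and
# regenerates db/structure.sql plus the db/schema_migrations checksum file.
bundle exec rails db:migrate
```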
|  | @ -0,0 +1 @@ | |||
| 7bc71fb040af525f9592177bfe35225a44d84b1a85d8c5bc669664c00303bafc | ||||
|  | @ -22005,7 +22005,8 @@ CREATE TABLE zoekt_indices ( | |||
|     zoekt_replica_id bigint, | ||||
|     reserved_storage_bytes bigint DEFAULT '10737418240'::bigint, | ||||
|     used_storage_bytes bigint DEFAULT 0 NOT NULL, | ||||
|     watermark_level smallint DEFAULT 0 NOT NULL | ||||
|     watermark_level smallint DEFAULT 0 NOT NULL, | ||||
|     metadata jsonb DEFAULT '{}'::jsonb NOT NULL | ||||
| ); | ||||
| 
 | ||||
| CREATE SEQUENCE zoekt_indices_id_seq | ||||
|  |  | |||
|  | @ -47,14 +47,18 @@ supporting custom domains a secondary IP is not needed. | |||
| 
 | ||||
| ## Prerequisites | ||||
| 
 | ||||
| Before proceeding with the Pages configuration, you must: | ||||
| This section describes the prerequisites for configuring GitLab Pages. | ||||
| 
 | ||||
| ### Wildcard domains | ||||
| 
 | ||||
| Before configuring Pages for wildcard domains, you must: | ||||
| 
 | ||||
| 1. Have a domain for Pages that is not a subdomain of your GitLab instance domain. | ||||
| 
 | ||||
|    | GitLab domain | Pages domain | Does it work? | | ||||
|    | :---: | :---: | :---: | | ||||
|    | `example.com` | `example.io` | **{check-circle}** Yes | | ||||
|    | `example.com` | `pages.example.com` | **{dotted-circle}** No | | ||||
|    | GitLab domain        | Pages domain        | Does it work? | | ||||
|    | -------------------- | ------------------- | ------------- | | ||||
|    | `example.com`        | `example.io`        | **{check-circle}** Yes | | ||||
|    | `example.com`        | `pages.example.com` | **{dotted-circle}** No | | ||||
|    | `gitlab.example.com` | `pages.example.com` | **{check-circle}** Yes | | ||||
| 
 | ||||
| 1. Configure a **wildcard DNS record**. | ||||
|  | @ -64,6 +68,24 @@ Before proceeding with the Pages configuration, you must: | |||
|    so that your users don't have to bring their own. | ||||
| 1. For custom domains, have a **secondary IP**. | ||||
| 
 | ||||
| ### Single-domain sites | ||||
| 
 | ||||
| Before configuring Pages for single-domain sites, you must: | ||||
| 
 | ||||
| 1. Have a domain for Pages that is not a subdomain of your GitLab instance domain. | ||||
| 
 | ||||
|    | GitLab domain        | Pages domain        | Supported | | ||||
|    | -------------------- | ------------------- | ------------- | | ||||
|    | `example.com`        | `example.io`        | **{check-circle}** Yes | | ||||
|    | `example.com`        | `pages.example.com` | **{dotted-circle}** No | | ||||
|    | `gitlab.example.com` | `pages.example.com` | **{check-circle}** Yes | | ||||
| 
 | ||||
| 1. Configure a **DNS record**. | ||||
| 1. Optional. If you decide to serve Pages under HTTPS, have a **TLS certificate** for that domain. | ||||
| 1. Optional but recommended. Enable [instance runners](../../ci/runners/index.md) | ||||
|    so that your users don't have to bring their own. | ||||
| 1. For custom domains, have a **secondary IP**. | ||||
| 
 | ||||
| NOTE: | ||||
| If your GitLab instance and the Pages daemon are deployed in a private network or behind a firewall, your GitLab Pages websites are only accessible to devices/users that have access to the private network. | ||||
| 
 | ||||
|  | @ -77,8 +99,8 @@ Suffix List prevents browsers from accepting | |||
| [supercookies](https://en.wikipedia.org/wiki/HTTP_cookie#Supercookie), | ||||
| among other things. | ||||
| 
 | ||||
| Follow [these instructions](https://publicsuffix.org/submit/) to submit your | ||||
| GitLab Pages subdomain. For instance, if your domain is `example.io`, you should | ||||
| To submit your GitLab Pages subdomain, follow [Submit amendments to the Public Suffix List](https://publicsuffix.org/submit/). | ||||
| For example, if your domain is `example.io`, you should | ||||
| request that `example.io` is added to the Public Suffix List. GitLab.com | ||||
| added `gitlab.io` [in 2016](https://gitlab.com/gitlab-com/gl-infra/reliability/-/issues/230). | ||||
| 
 | ||||
|  | @ -97,14 +119,14 @@ Where `example.io` is the domain GitLab Pages is served from, | |||
| `192.0.2.1` is the IPv4 address of your GitLab instance, and `2001:db8::1` is the | ||||
| IPv6 address. If you don't have IPv6, you can omit the `AAAA` record. | ||||
| 
 | ||||
| #### For namespace in URL path, without wildcard DNS | ||||
| #### DNS configuration for single-domain sites | ||||
| 
 | ||||
| > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/17584) as an [experiment](../../policy/experiment-beta-support.md) in GitLab 16.7. | ||||
| > - [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148621) to [beta](../../policy/experiment-beta-support.md) in GitLab 16.11. | ||||
| > - [Changed](https://gitlab.com/gitlab-org/gitlab-pages/-/issues/1111) implementation from NGINX to the GitLab Pages codebase in GitLab 17.2. | ||||
| > - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/483365) in GitLab 17.4. | ||||
| 
 | ||||
| If you need support for namespace in the URL path to remove the requirement for wildcard DNS: | ||||
| To configure GitLab Pages DNS for single-domain sites without wildcard DNS: | ||||
| 
 | ||||
| 1. Enable the GitLab Pages flag for this feature by adding | ||||
|    `gitlab_pages["namespace_in_path"] = true` to `/etc/gitlab/gitlab.rb`. | ||||
|  | @ -160,17 +182,18 @@ advanced one. | |||
| 
 | ||||
| ### Wildcard domains | ||||
| 
 | ||||
| The following configuration is the minimum setup to use GitLab Pages. | ||||
| It is the foundation for all other setups described here. | ||||
| In this configuration: | ||||
| 
 | ||||
| - NGINX proxies all requests to the GitLab Pages daemon. | ||||
| - The GitLab Pages daemon doesn't listen directly to the public internet. | ||||
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - [Wildcard DNS setup](#dns-configuration) | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| URL scheme: `http://<namespace>.example.io/<project_slug>` | ||||
| 
 | ||||
| The following is the minimum setup that you can use Pages with. It is the base for all | ||||
| other setups as described below. NGINX proxies all requests to the daemon. | ||||
| The Pages daemon doesn't listen to the outside world. | ||||
| To configure GitLab Pages to use wildcard domains: | ||||
| 
 | ||||
| 1. Set the external URL for GitLab Pages in `/etc/gitlab/gitlab.rb`: | ||||
| 
 | ||||
|  | @ -181,30 +204,38 @@ The Pages daemon doesn't listen to the outside world. | |||
| 
 | ||||
| 1. [Reconfigure GitLab](../restart_gitlab.md#reconfigure-a-linux-package-installation). | ||||
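
On a Linux package installation, the reconfigure step is typically run as follows (a sketch; it assumes shell access to the GitLab host):

```shell
# Re-renders the GitLab configuration, including the Pages daemon settings,
# from /etc/gitlab/gitlab.rb and restarts the affected services.
sudo gitlab-ctl reconfigure
```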
| 
 | ||||
| Watch the [video tutorial](https://youtu.be/dD8c7WNcc6s) for this configuration. | ||||
| The resulting URL scheme is `http://<namespace>.example.io/<project_slug>`. | ||||
| 
 | ||||
| ### Pages domain without wildcard DNS | ||||
| <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> | ||||
| For an overview, see [How to Enable GitLab Pages for GitLab CE and EE](https://youtu.be/dD8c7WNcc6s). | ||||
| <!-- Video published on 2017-02-22 --> | ||||
| 
 | ||||
| ### Single-domain sites | ||||
| 
 | ||||
| > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/17584) as an [experiment](../../policy/experiment-beta-support.md) in GitLab 16.7. | ||||
| > - [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148621) to [beta](../../policy/experiment-beta-support.md) in GitLab 16.11. | ||||
| > - [Changed](https://gitlab.com/gitlab-org/gitlab-pages/-/issues/1111) implementation from NGINX to the GitLab Pages codebase in GitLab 17.2. | ||||
| > - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/483365) in GitLab 17.4. | ||||
| 
 | ||||
| This configuration is the minimum setup for GitLab Pages. It is the base for all | ||||
| other configurations. In this configuration, NGINX proxies all requests to the daemon, | ||||
| because the GitLab Pages daemon doesn't listen to the outside world. | ||||
| The following configuration is the minimum setup to use GitLab Pages. | ||||
| It is the foundation for all other setups described here. | ||||
| In this configuration: | ||||
| 
 | ||||
| - NGINX proxies all requests to the GitLab Pages daemon. | ||||
| - The GitLab Pages daemon doesn't listen directly to the public internet. | ||||
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - You have configured DNS setup | ||||
|   [without a wildcard](#for-namespace-in-url-path-without-wildcard-dns). | ||||
| - You have configured DNS for | ||||
|   [single-domain sites](#dns-configuration-for-single-domain-sites). | ||||
| 
 | ||||
| To configure GitLab Pages to use single-domain sites: | ||||
| 
 | ||||
| 1. In `/etc/gitlab/gitlab.rb`, set the external URL for GitLab Pages, and enable the feature: | ||||
| 
 | ||||
|    ```ruby | ||||
|    # External_url here is only for reference | ||||
|    external_url "http://example.com" | ||||
|    pages_external_url 'http://example.io' | ||||
|    external_url "http://example.com" # Swap out this URL for your own | ||||
|    pages_external_url 'http://example.io' # Important: not a subdomain of external_url, so cannot be http://pages.example.com | ||||
| 
 | ||||
|    # Set this flag to enable this feature | ||||
|    gitlab_pages["namespace_in_path"] = true | ||||
|  | @ -216,9 +247,9 @@ The resulting URL scheme is `http://example.io/<namespace>/<project_slug>`. | |||
| 
 | ||||
| WARNING: | ||||
| GitLab Pages supports only one URL scheme at a time: | ||||
| with wildcard DNS, or without wildcard DNS. | ||||
| wildcard domains or single-domain sites. | ||||
| If you enable `namespace_in_path`, existing GitLab Pages websites | ||||
| are accessible only on domains without wildcard DNS. | ||||
| are accessible only as single-domain sites. | ||||
| 
 | ||||
| ### Wildcard domains with TLS support | ||||
| 
 | ||||
|  | @ -227,12 +258,8 @@ Prerequisites: | |||
| - [Wildcard DNS setup](#dns-configuration) | ||||
| - TLS certificate. Can be either Wildcard, or any other type meeting the [requirements](../../user/project/pages/custom_domains_ssl_tls_certification/index.md#manual-addition-of-ssltls-certificates). | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| URL scheme: `https://<namespace>.example.io/<project_slug>` | ||||
| 
 | ||||
| NGINX proxies all requests to the daemon. Pages daemon doesn't listen to the | ||||
| outside world. | ||||
| public internet. | ||||
| 
 | ||||
| 1. Place the wildcard TLS certificate for `*.example.io` and the key inside `/etc/gitlab/ssl`. | ||||
| 1. In `/etc/gitlab/gitlab.rb` specify the following configuration: | ||||
|  | @ -257,6 +284,8 @@ outside world. | |||
|    [System OAuth application](../../integration/oauth_provider.md#create-an-instance-wide-application) | ||||
|    to use the HTTPS protocol. | ||||
| 
 | ||||
| The resulting URL scheme is `https://<namespace>.example.io/<project_slug>`. | ||||
| 
 | ||||
| WARNING: | ||||
| Multiple wildcards for one instance is not supported. Only one wildcard per instance can be assigned. | ||||
| 
 | ||||
|  | @ -266,7 +295,7 @@ Before you reconfigure, remove the `gitlab_pages` section from `/etc/gitlab/gitl | |||
| then run `gitlab-ctl reconfigure`. For more information, read | ||||
| [GitLab Pages does not regenerate OAuth](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/3947). | ||||
| 
 | ||||
| ### Pages domain with TLS support, without wildcard DNS | ||||
| ### Single-domain sites with TLS support | ||||
| 
 | ||||
| > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/17584) as an [experiment](../../policy/experiment-beta-support.md) in GitLab 16.7. | ||||
| > - [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148621) to [beta](../../policy/experiment-beta-support.md) in GitLab 16.11. | ||||
|  | @ -275,20 +304,19 @@ then run `gitlab-ctl reconfigure`. For more information, read | |||
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - You have configured DNS setup | ||||
|   [without a wildcard](#for-namespace-in-url-path-without-wildcard-dns). | ||||
| - You have configured DNS for | ||||
|   [single-domain sites](#dns-configuration-for-single-domain-sites). | ||||
| - You have a TLS certificate that covers your domain (like `example.io`). | ||||
| 
 | ||||
| In this configuration, NGINX proxies all requests to the daemon. The GitLab Pages | ||||
| daemon doesn't listen to the outside world: | ||||
| daemon doesn't listen to the public internet: | ||||
| 
 | ||||
| 1. Add your TLS certificate and key as mentioned in the prerequisites into `/etc/gitlab/ssl`. | ||||
| 1. In `/etc/gitlab/gitlab.rb`, set the external URL for GitLab Pages, and enable the feature: | ||||
| 
 | ||||
|    ```ruby | ||||
|    # The external_url field is here only for reference. | ||||
|    external_url "https://example.com" | ||||
|    pages_external_url 'https://example.io' | ||||
|    external_url "https://example.com" # Swap out this URL for your own | ||||
|    pages_external_url 'https://example.io' # Important: not a subdomain of external_url, so cannot be https://pages.example.com | ||||
| 
 | ||||
|    pages_nginx['redirect_http_to_https'] = true | ||||
| 
 | ||||
|  | @ -322,9 +350,9 @@ The resulting URL scheme is `https://example.io/<namespace>/<project_slug>`. | |||
| 
 | ||||
| WARNING: | ||||
| GitLab Pages supports only one URL scheme at a time: | ||||
| with wildcard DNS, or without wildcard DNS. | ||||
| wildcard domains or single-domain sites. | ||||
| If you enable `namespace_in_path`, existing GitLab Pages websites | ||||
| are accessible only on domains without wildcard DNS. | ||||
| are accessible only as single-domain sites. | ||||
| 
 | ||||
| ### Wildcard domains with TLS-terminating Load Balancer | ||||
| 
 | ||||
|  | @ -333,10 +361,6 @@ Prerequisites: | |||
| - [Wildcard DNS setup](#dns-configuration) | ||||
| - [TLS-terminating load balancer](../../install/aws/index.md#load-balancer) | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| URL scheme: `https://<namespace>.example.io/<project_slug>` | ||||
| 
 | ||||
| This setup is primarily intended to be used when [installing a GitLab POC on Amazon Web Services](../../install/aws/index.md). This includes a TLS-terminating [classic load balancer](../../install/aws/index.md#load-balancer) that listens for HTTPS connections, manages TLS certificates, and forwards HTTP traffic to the instance. | ||||
| 
 | ||||
| 1. In `/etc/gitlab/gitlab.rb` specify the following configuration: | ||||
|  | @ -353,6 +377,8 @@ This setup is primarily intended to be used when [installing a GitLab POC on Ama | |||
| 
 | ||||
| 1. [Reconfigure GitLab](../restart_gitlab.md#reconfigure-a-linux-package-installation). | ||||
| 
 | ||||
| The resulting URL scheme is `https://<namespace>.example.io/<project_slug>`. | ||||
| 
 | ||||
| ### Global settings | ||||
| 
 | ||||
| Below is a table of all configuration settings known to Pages in a Linux package installation, | ||||
|  | @ -403,7 +429,7 @@ control over how the Pages daemon runs and serves content in your environment. | |||
| | `log_directory`                         | Absolute path to a log directory.                                                                                                                                                                                                                                                                          | | ||||
| | `log_format`                            | The log output format: `text` or `json`.                                                                                                                                                                                                                                                                   | | ||||
| | `log_verbose`                           | Verbose logging, true/false.                                                                                                                                                                                                                                                                               | | ||||
| | `namespace_in_path`                     | (Beta) Enable or disable namespace in the URL path to support [without wildcard DNS setup](#for-namespace-in-url-path-without-wildcard-dns). Default: `false`.                                                                                                                                             | | ||||
| | `namespace_in_path`                     | Enable or disable namespace in the URL path to support [single-domain sites DNS setup](#dns-configuration-for-single-domain-sites). Default: `false`.                                                                                                                                             | | ||||
| | `propagate_correlation_id`              | Set to true (false by default) to re-use existing Correlation ID from the incoming request header `X-Request-ID` if present. If a reverse proxy sets this header, the value is propagated in the request chain.                                                                                            | | ||||
| | `max_connections`                       | Limit on the number of concurrent connections to the HTTP, HTTPS or proxy listeners.                                                                                                                                                                                                                       | | ||||
| | `max_uri_length`                        | The maximum length of URIs accepted by GitLab Pages. Set to 0 for unlimited length.                                                                                                                                                                                                                        | | ||||
|  | @ -459,10 +485,6 @@ Prerequisites: | |||
| - [Wildcard DNS setup](#dns-configuration) | ||||
| - Secondary IP | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| URL scheme: `http://<namespace>.example.io/<project_slug>` and `http://custom-domain.com` | ||||
| 
 | ||||
| In that case, the Pages daemon is running, NGINX still proxies requests to | ||||
| the daemon but the daemon is also able to receive requests from the outside | ||||
| world. Custom domains are supported, but no TLS. | ||||
|  | @ -481,6 +503,8 @@ world. Custom domains are supported, but no TLS. | |||
| 
 | ||||
| 1. [Reconfigure GitLab](../restart_gitlab.md#reconfigure-a-linux-package-installation). | ||||
| 
 | ||||
| The resulting URL schemes are `http://<namespace>.example.io/<project_slug>` and `http://custom-domain.com`. | ||||
| 
 | ||||
| ### Custom domains with TLS support | ||||
| 
 | ||||
| Prerequisites: | ||||
|  | @ -489,10 +513,6 @@ Prerequisites: | |||
| - TLS certificate. Can be either Wildcard, or any other type meeting the [requirements](../../user/project/pages/custom_domains_ssl_tls_certification/index.md#manual-addition-of-ssltls-certificates). | ||||
| - Secondary IP | ||||
| 
 | ||||
| --- | ||||
| 
 | ||||
| URL scheme: `https://<namespace>.example.io/<project_slug>` and `https://custom-domain.com` | ||||
| 
 | ||||
| In that case, the Pages daemon is running, NGINX still proxies requests to | ||||
| the daemon but the daemon is also able to receive requests from the outside | ||||
| world. Custom domains and TLS are supported. | ||||
|  | @ -526,6 +546,8 @@ world. Custom domains and TLS are supported. | |||
|    [System OAuth application](../../integration/oauth_provider.md#create-an-instance-wide-application) | ||||
|    to use the HTTPS protocol. | ||||
| 
 | ||||
| The resulting URL schemes are `https://<namespace>.example.io/<project_slug>` and `https://custom-domain.com`. | ||||
| 
 | ||||
| ### Custom domain verification | ||||
| 
 | ||||
| To prevent malicious users from hijacking domains that don't belong to them, | ||||
|  |  | |||
|  | @ -39,7 +39,7 @@ Before setting up a self-hosted model infrastructure, you must have: | |||
| - A [supported model](supported_models_and_hardware_requirements.md) (either cloud-based or on-premises). | ||||
| - A [supported serving platform](supported_llm_serving_platforms.md) (either cloud-based or on-premises). | ||||
| - A locally hosted or GitLab.com AI Gateway. | ||||
| - GitLab [Enterprise Edition license](../../administration/license.md). | ||||
| - GitLab Ultimate + [Duo Enterprise license](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/?toggle=gitlab-duo-pro). | ||||
| 
 | ||||
| ## Choose a configuration type | ||||
| 
 | ||||
|  |  | |||
|  | @ -16,7 +16,7 @@ DETAILS: | |||
| > - [Enabled on self-managed](https://gitlab.com/groups/gitlab-org/-/epics/15176) in GitLab 17.6. | ||||
| > - Changed to require GitLab Duo add-on in GitLab 17.6 and later. | ||||
| 
 | ||||
| To deploy self-hosted AI models, you need **GitLab Enterprise Edition**. For more information about licensing, refer to the [licensing documentation](../../administration/license.md). | ||||
| To deploy self-hosted AI models, you need GitLab Ultimate and Duo Enterprise - [Start a trial](https://about.gitlab.com/solutions/gitlab-duo-pro/sales/?type=free-trial). | ||||
| 
 | ||||
| ## Offerings | ||||
| 
 | ||||
|  |  | |||
|  | @ -231,6 +231,9 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" \ | |||
| 
 | ||||
| Get a list of tags for given registry repository. | ||||
| 
 | ||||
| NOTE: | ||||
| Offset pagination is deprecated and keyset pagination is now the preferred pagination method. | ||||
| 
 | ||||
| ```plaintext | ||||
| GET /projects/:id/registry/repositories/:repository_id/tags | ||||
| ``` | ||||
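| 
 | ||||
| The following request is a sketch of keyset pagination for this endpoint. It assumes the `pagination=keyset` and `per_page` parameters behave as they do for other keyset-paginated GitLab REST endpoints, and that the URL of the next page is returned in the `Link` response header; the project and repository IDs are placeholders: | ||||
| 
 | ||||
| ```shell | ||||
| # Request the first page of tags using keyset pagination (IDs are hypothetical). | ||||
| curl --header "PRIVATE-TOKEN: <your_access_token>" \ | ||||
|   "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags?pagination=keyset&per_page=50" | ||||
| ``` | ||||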
|  |  | |||
|  | @ -12441,6 +12441,30 @@ The edge type for [`CiMinutesProjectMonthlyUsage`](#ciminutesprojectmonthlyusage | |||
| | <a id="ciminutesprojectmonthlyusageedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. | | ||||
| | <a id="ciminutesprojectmonthlyusageedgenode"></a>`node` | [`CiMinutesProjectMonthlyUsage`](#ciminutesprojectmonthlyusage) | The item at the end of the edge. | | ||||
| 
 | ||||
| #### `CiProjectSubscriptionConnection` | ||||
| 
 | ||||
| The connection type for [`CiProjectSubscription`](#ciprojectsubscription). | ||||
| 
 | ||||
| ##### Fields | ||||
| 
 | ||||
| | Name | Type | Description | | ||||
| | ---- | ---- | ----------- | | ||||
| | <a id="ciprojectsubscriptionconnectioncount"></a>`count` | [`Int!`](#int) | Total count of collection. | | ||||
| | <a id="ciprojectsubscriptionconnectionedges"></a>`edges` | [`[CiProjectSubscriptionEdge]`](#ciprojectsubscriptionedge) | A list of edges. | | ||||
| | <a id="ciprojectsubscriptionconnectionnodes"></a>`nodes` | [`[CiProjectSubscription]`](#ciprojectsubscription) | A list of nodes. | | ||||
| | <a id="ciprojectsubscriptionconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. | | ||||
| 
 | ||||
| #### `CiProjectSubscriptionEdge` | ||||
| 
 | ||||
| The edge type for [`CiProjectSubscription`](#ciprojectsubscription). | ||||
| 
 | ||||
| ##### Fields | ||||
| 
 | ||||
| | Name | Type | Description | | ||||
| | ---- | ---- | ----------- | | ||||
| | <a id="ciprojectsubscriptionedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. | | ||||
| | <a id="ciprojectsubscriptionedgenode"></a>`node` | [`CiProjectSubscription`](#ciprojectsubscription) | The item at the end of the edge. | | ||||
| 
 | ||||
| #### `CiProjectVariableConnection` | ||||
| 
 | ||||
| The connection type for [`CiProjectVariable`](#ciprojectvariable). | ||||
|  | @ -19990,6 +20014,17 @@ CI/CD variables given to a manual job. | |||
| | <a id="ciminutesprojectmonthlyusageproject"></a>`project` | [`Project`](#project) | Project having the recorded usage. | | ||||
| | <a id="ciminutesprojectmonthlyusagesharedrunnersduration"></a>`sharedRunnersDuration` | [`Int`](#int) | Total duration (in seconds) of shared runners use by the project for the month. | | ||||
| 
 | ||||
| ### `CiProjectSubscription` | ||||
| 
 | ||||
| #### Fields | ||||
| 
 | ||||
| | Name | Type | Description | | ||||
| | ---- | ---- | ----------- | | ||||
| | <a id="ciprojectsubscriptionauthor"></a>`author` | [`UserCore`](#usercore) | Author of the subscription. | | ||||
| | <a id="ciprojectsubscriptiondownstreamproject"></a>`downstreamProject` | [`CiSubscriptionsProjectDetails`](#cisubscriptionsprojectdetails) | Downstream project of the subscription.When an upstream project's pipeline completes, a pipeline is triggered in the downstream project. | | ||||
| | <a id="ciprojectsubscriptionid"></a>`id` | [`CiSubscriptionsProjectID`](#cisubscriptionsprojectid) | Global ID of the subscription. | | ||||
| | <a id="ciprojectsubscriptionupstreamproject"></a>`upstreamProject` | [`CiSubscriptionsProjectDetails`](#cisubscriptionsprojectdetails) | Upstream project of the subscription.When an upstream project's pipeline completes, a pipeline is triggered in the downstream project. | | ||||
| 
 | ||||
| ### `CiProjectVariable` | ||||
| 
 | ||||
| CI/CD variables for a project. | ||||
|  | @ -20290,6 +20325,26 @@ Represents the Geo replication and verification state of a ci_secure_file. | |||
| | <a id="cisubscriptionsprojectid"></a>`id` | [`CiSubscriptionsProjectID`](#cisubscriptionsprojectid) | Global ID of the subscription. | | ||||
| | <a id="cisubscriptionsprojectupstreamproject"></a>`upstreamProject` | [`Project`](#project) | Upstream project of the subscription. | | ||||
| 
 | ||||
| ### `CiSubscriptionsProjectDetails` | ||||
| 
 | ||||
| #### Fields | ||||
| 
 | ||||
| | Name | Type | Description | | ||||
| | ---- | ---- | ----------- | | ||||
| | <a id="cisubscriptionsprojectdetailsid"></a>`id` | [`ID!`](#id) | ID of the project. | | ||||
| | <a id="cisubscriptionsprojectdetailsname"></a>`name` | [`ID!`](#id) | Full path of the project. | | ||||
| | <a id="cisubscriptionsprojectdetailsnamespace"></a>`namespace` | [`CiSubscriptionsProjectNamespaceDetails!`](#cisubscriptionsprojectnamespacedetails) | Namespace of the project. | | ||||
| | <a id="cisubscriptionsprojectdetailsweburl"></a>`webUrl` | [`String`](#string) | Web URL of the project. | | ||||
| 
 | ||||
| ### `CiSubscriptionsProjectNamespaceDetails` | ||||
| 
 | ||||
| #### Fields | ||||
| 
 | ||||
| | Name | Type | Description | | ||||
| | ---- | ---- | ----------- | | ||||
| | <a id="cisubscriptionsprojectnamespacedetailsid"></a>`id` | [`ID!`](#id) | ID of the project. | | ||||
| | <a id="cisubscriptionsprojectnamespacedetailsname"></a>`name` | [`ID!`](#id) | Full path of the project. | | ||||
| 
 | ||||
| ### `CiTemplate` | ||||
| 
 | ||||
| GitLab CI/CD configuration template. | ||||
|  | @ -30567,10 +30622,12 @@ Project-level settings for product analytics provider. | |||
| | <a id="projectciaccessauthorizedagents"></a>`ciAccessAuthorizedAgents` | [`ClusterAgentAuthorizationCiAccessConnection`](#clusteragentauthorizationciaccessconnection) | Authorized cluster agents for the project through ci_access keyword. (see [Connections](#connections)) | | ||||
| | <a id="projectcicdsettings"></a>`ciCdSettings` | [`ProjectCiCdSetting`](#projectcicdsetting) | CI/CD settings for the project. | | ||||
| | <a id="projectciconfigpathordefault"></a>`ciConfigPathOrDefault` | [`String!`](#string) | Path of the CI configuration file. | | ||||
| | <a id="projectcidownstreamprojectsubscriptions"></a>`ciDownstreamProjectSubscriptions` **{warning-solid}** | [`CiProjectSubscriptionConnection`](#ciprojectsubscriptionconnection) | **Introduced** in GitLab 17.6. **Status**: Experiment. Pipeline subscriptions where this project is the upstream project.When this project's pipeline completes, a pipeline is triggered in the downstream project. | | ||||
| | <a id="projectcijobtokenauthlogs"></a>`ciJobTokenAuthLogs` **{warning-solid}** | [`CiJobTokenAuthLogConnection`](#cijobtokenauthlogconnection) | **Introduced** in GitLab 17.6. **Status**: Experiment. The CI Job Tokens authorization logs. | | ||||
| | <a id="projectcijobtokenscope"></a>`ciJobTokenScope` | [`CiJobTokenScopeType`](#cijobtokenscopetype) | The CI Job Tokens scope of access. | | ||||
| | <a id="projectcisubscribedprojects"></a>`ciSubscribedProjects` | [`CiSubscriptionsProjectConnection`](#cisubscriptionsprojectconnection) | Pipeline subscriptions for projects subscribed to the project. (see [Connections](#connections)) | | ||||
| | <a id="projectcisubscriptionsprojects"></a>`ciSubscriptionsProjects` | [`CiSubscriptionsProjectConnection`](#cisubscriptionsprojectconnection) | Pipeline subscriptions for the project. (see [Connections](#connections)) | | ||||
| | <a id="projectcisubscribedprojects"></a>`ciSubscribedProjects` **{warning-solid}** | [`CiSubscriptionsProjectConnection`](#cisubscriptionsprojectconnection) | **Deprecated** in GitLab 17.6. Use `ciDownstreamProjectSubscriptions`. | | ||||
| | <a id="projectcisubscriptionsprojects"></a>`ciSubscriptionsProjects` **{warning-solid}** | [`CiSubscriptionsProjectConnection`](#cisubscriptionsprojectconnection) | **Deprecated** in GitLab 17.6. Use `ciUpstreamProjectSubscriptions`. | | ||||
| | <a id="projectciupstreamprojectsubscriptions"></a>`ciUpstreamProjectSubscriptions` **{warning-solid}** | [`CiProjectSubscriptionConnection`](#ciprojectsubscriptionconnection) | **Introduced** in GitLab 17.6. **Status**: Experiment. Pipeline subscriptions where this project is the downstream project.When an upstream project's pipeline completes, a pipeline is triggered in the downstream project (this project). | | ||||
| | <a id="projectcodecoveragesummary"></a>`codeCoverageSummary` | [`CodeCoverageSummary`](#codecoveragesummary) | Code coverage summary associated with the project. | | ||||
| | <a id="projectcomplianceframeworks"></a>`complianceFrameworks` | [`ComplianceFrameworkConnection`](#complianceframeworkconnection) | Compliance frameworks associated with the project. (see [Connections](#connections)) | | ||||
| | <a id="projectcontainerexpirationpolicy"></a>`containerExpirationPolicy` **{warning-solid}** | [`ContainerExpirationPolicy`](#containerexpirationpolicy) | **Deprecated** in GitLab 17.5. Use `container_tags_expiration_policy`. | | ||||
|  |  | |||
|  | @ -7,12 +7,12 @@ info: Analysis of Application Settings for Cells 1.0. | |||
| 
 | ||||
| ## Statistics | ||||
| 
 | ||||
| - Number of attributes: 499 | ||||
| - Number of attributes: 503 | ||||
| - Number of encrypted attributes: 43 (9.0%) | ||||
| - Number of attributes documented: 309 (62.0%) | ||||
| - Number of attributes on GitLab.com different from the defaults: 218 (44.0%) | ||||
| - Number of attributes with `clusterwide` set: 498 (100.0%) | ||||
| - Number of attributes with `clusterwide: true` set: 120 (24.0%) | ||||
| - Number of attributes documented: 310 (62.0%) | ||||
| - Number of attributes on GitLab.com different from the defaults: 220 (44.0%) | ||||
| - Number of attributes with `clusterwide` set: 503 (100.0%) | ||||
| - Number of attributes with `clusterwide: true` set: 121 (24.0%) | ||||
| 
 | ||||
| ## Individual columns | ||||
| 
 | ||||
|  | @ -33,7 +33,7 @@ info: Analysis of Application Settings for Cells 1.0. | |||
| | `allow_possible_spam` | `false` | `boolean` | `` | `true` | `false` | `false` | `false`| `false` | | ||||
| | `allow_project_creation_for_guest_and_below` | `false` | `boolean` | `boolean` | `true` | `true` | `false` | `false`| `true` | | ||||
| | `allow_runner_registration_token` | `false` | `boolean` | `boolean` | `true` | `true` | `false` | `false`| `true` | | ||||
| | `allow_top_level_group_owners_to_create_service_accounts` | `false` | `boolean` | `` | `true` | `false` | `false` | `???`| `false` | | ||||
| | `allow_top_level_group_owners_to_create_service_accounts` | `false` | `boolean` | `` | `true` | `false` | `false` | `false`| `false` | | ||||
| | `anthropic_api_key` | `true` | `bytea` | `` | `false` | `null` | `false` | `false`| `false` | | ||||
| | `archive_builds_in_seconds` | `false` | `integer` | `` | `false` | `null` | `false` | `false`| `false` | | ||||
| | `arkose_labs_client_secret` | `true` | `bytea` | `` | `false` | `null` | `true` | `true`| `false` | | ||||
|  | @ -240,6 +240,7 @@ info: Analysis of Application Settings for Cells 1.0. | |||
| | `housekeeping_incremental_repack_period` | `false` | `integer` | `integer` | `true` | `10` | `false` | `false`| `true` | | ||||
| | `html_emails_enabled` | `false` | `boolean` | `boolean` | `false` | `true` | `false` | `false`| `true` | | ||||
| | `id` | `false` | `bigint` | `` | `true` | `???` | `false` | `false`| `false` | | ||||
| | `identity_verification_settings` | `false` | `jsonb` | `` | `true` | `'{}'::jsonb` | `true` | `true`| `false` | | ||||
| | `import_sources` | `false` | `text` | `array of strings` | `false` | `null` | `true` | `true`| `true` | | ||||
| | `importers` | `false` | `jsonb` | `` | `true` | `'{}'::jsonb` | `true` | `true`| `false` | | ||||
| | `inactive_projects_delete_after_months` | `false` | `integer` | `` | `true` | `2` | `false` | `false`| `false` | | ||||
|  | @ -420,6 +421,7 @@ info: Analysis of Application Settings for Cells 1.0. | |||
| | `sidekiq_job_limiter_compression_threshold_bytes` | `false` | `integer` | `integer` | `true` | `100000` | `false` | `false`| `true` | | ||||
| | `sidekiq_job_limiter_limit_bytes` | `false` | `integer` | `integer` | `true` | `0` | `true` | `false`| `true` | | ||||
| | `sidekiq_job_limiter_mode` | `false` | `smallint` | `string` | `true` | `1` | `false` | `false`| `true` | | ||||
| | `sign_in_restrictions` | `false` | `jsonb` | `` | `true` | `'{}'::jsonb` | `true` | `false`| `false` | | ||||
| | `signup_enabled` | `false` | `boolean` | `boolean` | `false` | `null` | `true` | `false`| `true` | | ||||
| | `silent_mode_enabled` | `false` | `boolean` | `boolean` | `true` | `false` | `false` | `false`| `true` | | ||||
| | `slack_app_enabled` | `false` | `boolean` | `boolean` | `false` | `false` | `true` | `false`| `true` | | ||||
|  | @ -486,6 +488,7 @@ info: Analysis of Application Settings for Cells 1.0. | |||
| | `throttle_unauthenticated_period_in_seconds` | `false` | `integer` | `integer` | `true` | `3600` | `true` | `false`| `true` | | ||||
| | `throttle_unauthenticated_requests_per_period` | `false` | `integer` | `integer` | `true` | `3600` | `true` | `false`| `true` | | ||||
| | `time_tracking_limit_to_hours` | `false` | `boolean` | `boolean` | `true` | `false` | `true` | `false`| `true` | | ||||
| | `transactional_emails` | `false` | `jsonb` | `` | `true` | `'{}'::jsonb` | `false` | `false`| `false` | | ||||
| | `two_factor_grace_period` | `false` | `integer` | `integer` | `false` | `48` | `false` | `false`| `true` | | ||||
| | `unconfirmed_users_delete_after_days` | `false` | `integer` | `integer` | `true` | `7` | `true` | `true`| `true` | | ||||
| | `unique_ips_limit_enabled` | `false` | `boolean` | `boolean` | `true` | `false` | `false` | `false`| `true` | | ||||
|  |  | |||
|  | @ -2073,7 +2073,7 @@ Searching is different from [filtering](#filter). | |||
| 
 | ||||
| When referring to the subscription billing model: | ||||
| 
 | ||||
| - For GitLab SaaS, use **seats**. Customers purchase seats. Users occupy seats when they are invited | ||||
| - For GitLab.com, use **seats**. Customers purchase seats. Users occupy seats when they are invited | ||||
|   to a group, with some [exceptions](../../../subscriptions/gitlab_com/index.md#how-seat-usage-is-determined). | ||||
| - For GitLab self-managed, use **users**. Customers purchase subscriptions for a specified number of **users**. | ||||
| 
 | ||||
|  |  | |||
|  | @ -0,0 +1,327 @@ | |||
| --- | ||||
| stage: Create | ||||
| group: Source Code | ||||
| description: Common commands and workflows. | ||||
| info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments" | ||||
| --- | ||||
| 
 | ||||
| # File management | ||||
| 
 | ||||
| Git provides file management capabilities that help you to track changes, | ||||
| collaborate with others, and manage large files efficiently. | ||||
| 
 | ||||
| ## File history | ||||
| 
 | ||||
| Use `git log` to view a file's complete history and understand how it has changed over time. | ||||
| The file history shows you: | ||||
| 
 | ||||
| - The author of each change. | ||||
| - The date and time of each modification. | ||||
| - The specific changes made in each commit. | ||||
| 
 | ||||
| For example, to view `history` information about the `CONTRIBUTING.md` file in the root | ||||
| of the `gitlab` repository, run: | ||||
| 
 | ||||
| ```shell | ||||
| git log CONTRIBUTING.md | ||||
| ``` | ||||
| 
 | ||||
| Example output: | ||||
| 
 | ||||
| ```shell | ||||
| commit b350bf041666964c27834885e4590d90ad0bfe90 | ||||
| Author: Nick Malcolm <nmalcolm@gitlab.com> | ||||
| Date:   Fri Dec 8 13:43:07 2023 +1300 | ||||
| 
 | ||||
|     Update security contact and vulnerability disclosure info | ||||
| 
 | ||||
| commit 8e4c7f26317ff4689610bf9d031b4931aef54086 | ||||
| Author: Brett Walker <bwalker@gitlab.com> | ||||
| Date:   Fri Oct 20 17:53:25 2023 +0000 | ||||
| 
 | ||||
|     Fix link to Code of Conduct | ||||
| 
 | ||||
|     and condense some of the verbiage | ||||
| ``` | ||||
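| 
 | ||||
| You can pass standard `git log` options to refine the output. For example, the following sketch (plain Git, not specific to GitLab) shows each change as a patch and follows the file across renames: | ||||
| 
 | ||||
| ```shell | ||||
| # Show the patch introduced by each commit and follow the file across renames. | ||||
| git log --follow -p CONTRIBUTING.md | ||||
| ``` | ||||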
| 
 | ||||
| ## Check previous changes to a file | ||||
| 
 | ||||
| Use `git blame` to see who made the last change to a file and when. | ||||
| This helps to understand the context of a file's content, | ||||
| resolve conflicts, and identify the person responsible for a specific change. | ||||
| 
 | ||||
| If you want to find `blame` information about a `README.md` file in the local directory: | ||||
| 
 | ||||
| 1. Open a terminal or command prompt. | ||||
| 1. Go to your Git repository. | ||||
| 1. Run the following command: | ||||
| 
 | ||||
|    ```shell | ||||
|    git blame README.md | ||||
|    ``` | ||||
| 
 | ||||
| 1. To navigate the results page, press <kbd>Space</kbd>. | ||||
| 1. To exit out of the results, press <kbd>Q</kbd>. | ||||
| 
 | ||||
| This output displays the file content with annotations showing the commit SHA, author, | ||||
| and date for each line. For example: | ||||
| 
 | ||||
| ```shell | ||||
| 58233c4f1054c (Dan Rhodes           2022-05-13 07:02:20 +0000  1) ## Contributor License Agreement | ||||
| b87768f435185 (Jamie Hurewitz       2017-10-31 18:09:23 +0000  2) | ||||
| 8e4c7f26317ff (Brett Walker         2023-10-20 17:53:25 +0000  3) Contributions to this repository are subject to the | ||||
| 58233c4f1054c (Dan Rhodes           2022-05-13 07:02:20 +0000  4) | ||||
| ``` | ||||
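| 
 | ||||
| To annotate only part of a large file, you can limit `git blame` to a line range with the `-L` option (plain Git, not specific to GitLab): | ||||
| 
 | ||||
| ```shell | ||||
| # Annotate only lines 1 through 10 of README.md. | ||||
| git blame -L 1,10 README.md | ||||
| ``` | ||||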
| 
 | ||||
| ## Git LFS | ||||
| 
 | ||||
| Git Large File Storage (LFS) is an extension that helps you manage large files in Git repositories. | ||||
| It replaces large files with text pointers in Git, and stores the file contents on a remote server. | ||||
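| 
 | ||||
| For example, after a file is tracked with Git LFS, the object committed to Git is a small pointer in the Git LFS pointer format. The following sketch uses a hypothetical path; the OID and size depend on the file: | ||||
| 
 | ||||
| ```shell | ||||
| # Inspect the pointer Git stores for an LFS-tracked file (hypothetical path). | ||||
| git show HEAD:videos/demo.iso | ||||
| # version https://git-lfs.github.com/spec/v1 | ||||
| # oid sha256:<sha256-of-the-file-contents> | ||||
| # size 104857600 | ||||
| ``` | ||||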
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - Download and install the appropriate version of the [CLI extension for Git LFS](https://git-lfs.com) for your operating system. | ||||
| - [Configure your project to use Git LFS](lfs/index.md). | ||||
| - Install the Git LFS pre-push hook. To do this, run `git lfs install` in the root directory of your repository. | ||||
| 
 | ||||
| ### Add and track files | ||||
| 
 | ||||
| To add a large file into your Git repository and track it with Git LFS: | ||||
| 
 | ||||
| 1. Configure tracking for all files of a certain type. Replace `iso` with your desired file type: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs track "*.iso" | ||||
|    ``` | ||||
| 
 | ||||
|    This command creates a `.gitattributes` file with instructions to handle all | ||||
|    ISO files with Git LFS. The following line is added to your `.gitattributes` file: | ||||
| 
 | ||||
|    ```plaintext | ||||
|    *.iso filter=lfs -text | ||||
|    ``` | ||||
| 
 | ||||
| 1. Add a file of that type, `.iso`, to your repository. | ||||
| 1. Track the changes to both the `.gitattributes` file and the `.iso` file: | ||||
| 
 | ||||
|    ```shell | ||||
|    git add . | ||||
|    ``` | ||||
| 
 | ||||
| 1. Ensure you've added both files: | ||||
| 
 | ||||
|    ```shell | ||||
|    git status | ||||
|    ``` | ||||
| 
 | ||||
|    The `.gitattributes` file must be included in your commit. | ||||
|    If it isn't included, Git does not track the ISO file with Git LFS. | ||||
| 
 | ||||
|    NOTE: | ||||
|    Ensure the files you're changing are not listed in a `.gitignore` file. | ||||
|    If they are, Git commits the change locally but doesn't push it to your upstream repository. | ||||
| 
 | ||||
| 1. Commit both files to your local copy of the repository: | ||||
| 
 | ||||
|    ```shell | ||||
|    git commit -m "Add an ISO file and .gitattributes" | ||||
|    ``` | ||||
| 
 | ||||
| 1. Push your changes upstream. Replace `main` with the name of your branch: | ||||
| 
 | ||||
|    ```shell | ||||
|    git push origin main | ||||
|    ``` | ||||
| 
 | ||||
| 1. Create a merge request. | ||||
| 
 | ||||
| NOTE: | ||||
| When you add a new file type to Git LFS tracking, existing files of this type | ||||
| are not converted to Git LFS. Only files of this type that you add after you begin tracking are stored in Git LFS. Use `git lfs migrate` to convert existing files to Git LFS. | ||||
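| 
 | ||||
| For example, the following is a hedged sketch of converting existing ISO files to Git LFS. `git lfs migrate import` rewrites history on the affected branches, so coordinate with other contributors before you push: | ||||
| 
 | ||||
| ```shell | ||||
| # Convert existing ISO files on the current branch to Git LFS (rewrites history). | ||||
| git lfs migrate import --include="*.iso" | ||||
| ``` | ||||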
| 
 | ||||
| ### Stop tracking a file | ||||
| 
 | ||||
| When you stop tracking a file with Git LFS, the file remains on disk because it's still | ||||
| part of your repository's history. | ||||
| 
 | ||||
| To stop tracking a file with Git LFS: | ||||
| 
 | ||||
| 1. Run the `git lfs untrack` command and provide the path to the file: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs untrack doc/example.iso | ||||
|    ``` | ||||
| 
 | ||||
| 1. Use the `touch` command to convert it back to a standard file: | ||||
| 
 | ||||
|    ```shell | ||||
|    touch doc/example.iso | ||||
|    ``` | ||||
| 
 | ||||
| 1. Track the changes to the file: | ||||
| 
 | ||||
|    ```shell | ||||
|    git add . | ||||
|    ``` | ||||
| 
 | ||||
| 1. Commit and push your changes. | ||||
| 1. Create a merge request and request a review. | ||||
| 1. Merge the request into the target branch. | ||||
| 
 | ||||
| NOTE: | ||||
| If you delete an object tracked by Git LFS, without tracking it with `git lfs untrack`, | ||||
| the object shows as `modified` in `git status`. | ||||
| 
 | ||||
| ### Stop tracking all files of a single type | ||||
| 
 | ||||
| To stop tracking all files of a particular type in Git LFS: | ||||
| 
 | ||||
| 1. Run the `git lfs untrack` command and provide the file type to stop tracking: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs untrack "*.iso" | ||||
|    ``` | ||||
| 
 | ||||
| 1. Use the `touch` command to convert the files back to standard files: | ||||
| 
 | ||||
|    ```shell | ||||
|    touch *.iso | ||||
|    ``` | ||||
| 
 | ||||
| 1. Track the changes to the files: | ||||
| 
 | ||||
|    ```shell | ||||
|    git add . | ||||
|    ``` | ||||
| 
 | ||||
| 1. Commit and push your changes. | ||||
| 1. Create a merge request and request a review. | ||||
| 1. Merge the request into the target branch. | ||||
| 
 | ||||
| ## File locks | ||||
| 
 | ||||
| File locks help prevent conflicts and ensure that only one person can edit a file at a time. | ||||
| They're a good option for: | ||||
| 
 | ||||
| - Binary files that can't be merged. For example, design files and videos. | ||||
| - Files that require exclusive access during editing. | ||||
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - You must have [Git LFS installed](../../topics/git/lfs/index.md). | ||||
| - You must have the Maintainer role for the project. | ||||
| 
 | ||||
| ### Configure file locks | ||||
| 
 | ||||
| To configure file locks for a specific file type: | ||||
| 
 | ||||
| 1. Use the `git lfs track` command with the `--lockable` option. For example, to configure PNG files: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs track "*.png" --lockable | ||||
|    ``` | ||||
| 
 | ||||
|    This command creates or updates your `.gitattributes` file with the following content: | ||||
| 
 | ||||
|    ```plaintext | ||||
|    *.png filter=lfs diff=lfs merge=lfs -text lockable | ||||
|    ``` | ||||
| 
 | ||||
| 1. Push the `.gitattributes` file to the remote repository for the changes to take effect. | ||||
| 
 | ||||
|    NOTE: | ||||
|    After a file type is registered as lockable, it is automatically marked as read-only. | ||||
| 
 | ||||
| #### Configure file locks without LFS | ||||
| 
 | ||||
| To register a file type as lockable without using Git LFS: | ||||
| 
 | ||||
| 1. Edit the `.gitattributes` file manually: | ||||
| 
 | ||||
|    ```plaintext | ||||
|    *.pdf lockable | ||||
|    ``` | ||||
| 
 | ||||
| 1. Push the `.gitattributes` file to the remote repository. | ||||
| 
 | ||||
| ### Lock and unlock files | ||||
| 
 | ||||
| To lock or unlock a file with exclusive file locking: | ||||
| 
 | ||||
| 1. Open a terminal window in your repository directory. | ||||
| 1. Run one of the following commands: | ||||
| 
 | ||||
|    ::Tabs | ||||
| 
 | ||||
|    :::TabTitle Lock a file | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs lock path/to/file.png | ||||
|    ``` | ||||
| 
 | ||||
|    :::TabTitle Unlock a file | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs unlock path/to/file.png | ||||
|    ``` | ||||
| 
 | ||||
|    :::TabTitle Unlock a file by ID | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs unlock --id=123 | ||||
|    ``` | ||||
| 
 | ||||
|    :::TabTitle Force unlock a file | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs unlock --id=123 --force | ||||
|    ``` | ||||
| 
 | ||||
|    ::EndTabs | ||||
| 
 | ||||
| ### View locked files | ||||
| 
 | ||||
| To view locked files: | ||||
| 
 | ||||
| 1. Open a terminal window in your repository. | ||||
| 1. Run the following command: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs locks | ||||
|    ``` | ||||
| 
 | ||||
|    The output lists the locked files, the users who locked them, and the file IDs. | ||||
| 
 | ||||
| In the GitLab UI: | ||||
| 
 | ||||
| - The repository file tree displays an LFS badge for files tracked by Git LFS. | ||||
| - Exclusively-locked files show a padlock icon. | ||||
| 
 | ||||
| You can also [view and remove existing locks](../../user/project/file_lock.md) from the GitLab UI. | ||||
| 
 | ||||
| NOTE: | ||||
| When you rename an exclusively-locked file, the lock is lost. You must lock it again to keep it locked. | ||||
| 
 | ||||
| ### Lock and edit a file | ||||
| 
 | ||||
| To lock a file, edit it, and optionally unlock it: | ||||
| 
 | ||||
| 1. Lock the file: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs lock <file_path> | ||||
|    ``` | ||||
| 
 | ||||
| 1. Edit the file. | ||||
| 1. Optional. Unlock the file when you're done: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs unlock <file_path> | ||||
|    ``` | ||||
| 
 | ||||
| ## Related topics | ||||
| 
 | ||||
| - [File management with the GitLab UI](../../user/project/repository/files/index.md) | ||||
| - [Git Large File Storage (LFS) documentation](lfs/index.md) | ||||
| - [File locking](../../user/project/file_lock.md) | ||||
|  | @ -81,171 +81,6 @@ It offers both server settings and project-specific settings. | |||
|        handling for individual files and file types. | ||||
|   1. Add the files and file types you want to track with Git LFS. | ||||
| 
 | ||||
| ## Add a file with Git LFS | ||||
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - You have downloaded and installed the appropriate version of the | ||||
|   [CLI extension for Git LFS](https://git-lfs.com) for your operating system. | ||||
| - Your project is [configured to use Git LFS](#configure-git-lfs-for-a-project). | ||||
| 
 | ||||
| To add a large file into your Git repository and immediately track it with Git LFS: | ||||
| 
 | ||||
| 1. To track all files of a certain type with Git LFS, rather than a single file, | ||||
|    run this command, replacing `iso` with your desired file type: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs track "*.iso" | ||||
|    ``` | ||||
| 
 | ||||
|    This command creates a `.gitattributes` file with instructions to handle all | ||||
|    ISO files with Git LFS. The line in your `.gitattributes` file looks like this: | ||||
| 
 | ||||
|    ```plaintext | ||||
|    *.iso filter=lfs -text | ||||
|    ``` | ||||
| 
 | ||||
| 1. Add a file of that type (`.iso`) to your repository. | ||||
| 1. Tell Git to track the changes to both the `.gitattributes` file and the `.iso` file: | ||||
| 
 | ||||
|    ```shell | ||||
|    git add . | ||||
|    ``` | ||||
| 
 | ||||
| 1. To ensure you've added both files, run `git status`. If the `.gitattributes` file | ||||
|    isn't included in your commit, users who clone your repository don't get the | ||||
|    files they need. | ||||
| 1. Commit both files to your local copy of your repository: | ||||
| 
 | ||||
|    ```shell | ||||
|    git commit -am "Add an ISO file and .gitattributes" | ||||
|    ``` | ||||
| 
 | ||||
| 1. Push your changes back upstream, replacing `main` with the name of your branch: | ||||
| 
 | ||||
|    ```shell | ||||
|    git push origin main | ||||
|    ``` | ||||
| 
 | ||||
|    Make sure the files you are changing aren't listed in a `.gitignore` file. | ||||
|    If this file (or file type) is in your `.gitignore` file, Git commits | ||||
|    the change locally, but does not push it to your upstream repository. | ||||
| 
 | ||||
| 1. Create your merge request. | ||||
| 
 | ||||
| ### Add a file type to Git LFS | ||||
| 
 | ||||
| When you add a new file type into Git LFS tracking, existing files of this type | ||||
| are _not_ converted to Git LFS. Files of this type added _after_ you begin | ||||
| tracking are added to Git LFS. To convert existing files of that type to | ||||
| use Git LFS, use `git lfs migrate`. | ||||
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - You have downloaded and installed the appropriate version of the | ||||
|   [CLI extension for Git LFS](https://git-lfs.com) for your operating system. | ||||
| - Your project is [configured to use Git LFS](#configure-git-lfs-for-a-project). | ||||
| 
 | ||||
| To start tracking a file type in Git LFS: | ||||
| 
 | ||||
| 1. Make sure this file type isn't listed in your project's `.gitignore` file. | ||||
|    If this file type is in your `.gitignore` file, Git commits your changes | ||||
|    locally, but does not push it to your upstream repository. | ||||
| 1. Decide what file types to track with Git LFS. For each file type, run this | ||||
|    command, replacing `iso` with your desired file type: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs track "*.iso" | ||||
|    ``` | ||||
| 
 | ||||
| 1. Tell Git to track the changes to the `.gitattributes` file. Commit the | ||||
|    file to your local copy of your repository, replacing `iso` with your desired file type: | ||||
| 
 | ||||
|    ```shell | ||||
|    git add . | ||||
|    git commit -am "Use Git LFS for files of type .iso" | ||||
|    ``` | ||||
| 
 | ||||
| 1. Push your changes back upstream, replacing `filetype` with the name of your branch: | ||||
| 
 | ||||
|    ```shell | ||||
|    git push origin filetype | ||||
|    ``` | ||||
| 
 | ||||
| ## Stop tracking a file with Git LFS | ||||
| 
 | ||||
| When you stop tracking a file with Git LFS, the file remains on disk because it remains part of your repository's | ||||
| history. To understand why, see [Delete a Git LFS file from repository history](#delete-a-git-lfs-file-from-repository-history). | ||||
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - You have downloaded and installed the appropriate version of the | ||||
|   [CLI extension for Git LFS](https://git-lfs.com) for your operating system. | ||||
| - You have installed the Git LFS pre-push hook by running `git lfs install` | ||||
|   in the root directory of your repository. | ||||
| 
 | ||||
| To stop tracking a file with Git LFS: | ||||
| 
 | ||||
| 1. Run the [`git lfs untrack`](https://github.com/git-lfs/git-lfs/blob/main/docs/man/git-lfs-untrack.adoc) | ||||
|    command and provide the path to the file: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs untrack doc/example.iso | ||||
|    ``` | ||||
| 
 | ||||
| 1. Use the `touch` command to convert it back to a standard file: | ||||
| 
 | ||||
|    ```shell | ||||
|    touch doc/example.iso | ||||
|    ``` | ||||
| 
 | ||||
| 1. Tell Git to track the changes to the file: | ||||
| 
 | ||||
|    ```shell | ||||
|    git add . | ||||
|    ``` | ||||
| 
 | ||||
| 1. Commit and push your changes. | ||||
| 1. Create a merge request and request a review. | ||||
| 1. After you get the required approvals, merge the request into the target branch. | ||||
| 
 | ||||
| If you delete an object (`example.iso`) tracked by Git LFS, but don't use | ||||
| the `git lfs untrack` command, `example.iso` shows as `modified` in `git status`. | ||||
| 
 | ||||
| ### Stop tracking all files of a single type | ||||
| 
 | ||||
| Prerequisites: | ||||
| 
 | ||||
| - You have downloaded and installed the appropriate version of the | ||||
|   [CLI extension for Git LFS](https://git-lfs.com) for your operating system. | ||||
| - You have installed the Git LFS pre-push hook by running `git lfs install` | ||||
|   in the root directory of your repository. | ||||
| 
 | ||||
| To stop tracking all files of a particular type in Git LFS: | ||||
| 
 | ||||
| 1. Run the [`git lfs untrack`](https://github.com/git-lfs/git-lfs/blob/main/docs/man/git-lfs-untrack.adoc) | ||||
|    command and provide the file type to stop tracking: | ||||
| 
 | ||||
|    ```shell | ||||
|    git lfs untrack "*.iso" | ||||
|    ``` | ||||
| 
 | ||||
| 1. Use the `touch` command to convert the files back to standard files: | ||||
| 
 | ||||
|    ```shell | ||||
|    touch *.iso | ||||
|    ``` | ||||
| 
 | ||||
| 1. Tell Git to track the changes to the files: | ||||
| 
 | ||||
|    ```shell | ||||
|    git add . | ||||
|    ``` | ||||
| 
 | ||||
| 1. Commit and push your changes. | ||||
| 1. Create a merge request and request a review. | ||||
| 1. After you get the required approvals, merge the request into the target branch. | ||||
| 
 | ||||
| ## Enable or disable Git LFS for a project | ||||
| 
 | ||||
| Git LFS is enabled by default for both self-managed instances and GitLab.com. | ||||
|  | @ -262,6 +97,12 @@ To enable or disable Git LFS at the project level: | |||
| 1. Select the **Git Large File Storage (LFS)** toggle. | ||||
| 1. Select **Save changes**. | ||||
| 
 | ||||
| ## Add and track files | ||||
| 
 | ||||
| You can add large files to Git LFS to manage them efficiently in your Git repository. | ||||
| When you track files with Git LFS, they are replaced with text pointers in Git, | ||||
| and stored on a remote server. For more information, see [Git LFS](../../../topics/git/file_management.md#git-lfs). | ||||
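| 
 | ||||
| For example, a minimal sketch of tracking a file type and committing it (the file name is a placeholder): | ||||
| 
 | ||||
| ```shell | ||||
| # Track ISO files, then commit the .gitattributes file together with the large file. | ||||
| git lfs track "*.iso" | ||||
| git add .gitattributes disk-image.iso | ||||
| git commit -m "Track ISO files with Git LFS" | ||||
| git push origin main | ||||
| ``` | ||||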
| 
 | ||||
| ## Clone a repository that uses Git LFS | ||||
| 
 | ||||
| When you clone a repository that uses Git LFS, Git detects the LFS-tracked files | ||||
|  | @ -308,8 +149,9 @@ the total size of your repository, see | |||
| 
 | ||||
| ## Related topics | ||||
| 
 | ||||
| - Use Git LFS to set up [exclusive file locks](../../../user/project/file_lock.md#exclusive-file-locks). | ||||
| - Use Git LFS to set up [exclusive file locks](../file_management.md#configure-file-locks). | ||||
| - Blog post: [Getting started with Git LFS](https://about.gitlab.com/blog/2017/01/30/getting-started-with-git-lfs-tutorial/) | ||||
| - [Git LFS with Git](../../../topics/git/file_management.md#git-lfs) | ||||
| - [Git LFS developer information](../../../development/lfs.md) | ||||
| - [GitLab Git Large File Storage (LFS) Administration](../../../administration/lfs/index.md) for self-managed instances | ||||
| - [Troubleshooting Git LFS](troubleshooting.md) | ||||
|  |  | |||
|  | @ -500,26 +500,6 @@ You can configure a custom limit on self-managed instances with the `scan_execut | |||
| 
 | ||||
| <div class="deprecation breaking-change" data-milestone="18.0"> | ||||
| 
 | ||||
| ### List container registry repository tags API endpoint pagination | ||||
| 
 | ||||
| <div class="deprecation-notes"> | ||||
| 
 | ||||
| - Announced in GitLab <span class="milestone">16.10</span> | ||||
| - Removal in GitLab <span class="milestone">18.0</span> ([breaking change](https://docs.gitlab.com/ee/update/terminology.html#breaking-change)) | ||||
| - To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/gitlab/-/issues/432470). | ||||
| 
 | ||||
| </div> | ||||
| 
 | ||||
| You can use the container registry REST API to [get a list of registry repository tags](https://docs.gitlab.com/ee/api/container_registry.html#list-registry-repository-tags). We plan to improve this endpoint, adding more metadata and new features like improved sorting and filtering. | ||||
| 
 | ||||
| While offset-based pagination was already available for this endpoint, keyset-based pagination was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/432470) in GitLab 16.10 for GitLab.com only. This is now the preferred pagination method. | ||||
| 
 | ||||
| Offset-based pagination for the [List registry repository tags](https://docs.gitlab.com/ee/api/container_registry.html#list-registry-repository-tags) endpoint is deprecated in GitLab 16.10 and will be removed in 18.0. Instead, use the keyset-based pagination. | ||||
| 
 | ||||
| </div> | ||||
| 
 | ||||
| <div class="deprecation breaking-change" data-milestone="18.0"> | ||||
| 
 | ||||
| ### OpenTofu CI/CD template | ||||
| 
 | ||||
| <div class="deprecation-notes"> | ||||
|  |  | |||
|  | @ -27,7 +27,7 @@ said to have "released the lock". | |||
| 
 | ||||
| GitLab supports two different modes of file locking: | ||||
| 
 | ||||
| - [Exclusive file locks](#exclusive-file-locks) for binary files: done | ||||
| - [Exclusive file locks](../../topics/git/file_management.md#file-locks) for binary files: done | ||||
|   **through the command line** with Git LFS and `.gitattributes`, it prevents locked | ||||
|   files from being modified on any branch. | ||||
| - [Default branch locks](#default-branch-file-and-directory-locks): done | ||||
|  | @ -44,156 +44,6 @@ users are prevented from modifying locked files by pushing, merging, | |||
| or any other means, and are shown an error like: | ||||
| `'.gitignore' is locked by @Administrator`. | ||||
| 
 | ||||
| ## Exclusive file locks | ||||
| 
 | ||||
| This process allows you to lock single files or file extensions and it is | ||||
| done through the command line. It doesn't require GitLab paid subscriptions. | ||||
| 
 | ||||
| Git LFS is well known for tracking files to reduce the storage of | ||||
| Git repositories, but it can also be used for [locking files](https://github.com/git-lfs/git-lfs/wiki/File-Locking). | ||||
| This is the method used for Exclusive File Locks. | ||||
| 
 | ||||
| ### Install Git LFS | ||||
| 
 | ||||
| Before getting started, make sure you have [Git LFS installed](../../topics/git/lfs/index.md) in your computer. Open a terminal window and run: | ||||
| 
 | ||||
| ```shell | ||||
| git-lfs --version | ||||
| ``` | ||||
| 
 | ||||
| If it doesn't recognize this command, you must install it. There are | ||||
| several [installation methods](https://git-lfs.com/) that you can | ||||
| choose according to your OS. To install it with Homebrew: | ||||
| 
 | ||||
| ```shell | ||||
| brew install git-lfs | ||||
| ``` | ||||
| 
 | ||||
| Once installed, **open your local repository in a terminal window** and | ||||
| install Git LFS in your repository. If you're sure that LFS is already installed, | ||||
| you can skip this step. If you're unsure, re-installing it does no harm: | ||||
| 
 | ||||
| ```shell | ||||
| git lfs install | ||||
| ``` | ||||
| 
 | ||||
| For more information, see [Git Large File Storage (LFS)](../../topics/git/lfs/index.md). | ||||
| 
 | ||||
| ### Configure Exclusive File Locks | ||||
| 
 | ||||
| You need the Maintainer role to configure | ||||
| Exclusive File Locks for your project through the command line. | ||||
| 
 | ||||
| The first thing to do before using File Locking is to tell Git LFS which | ||||
| kind of files are lockable. The following command stores PNG files | ||||
| in LFS and flags them as lockable: | ||||
| 
 | ||||
| ```shell | ||||
| git lfs track "*.png" --lockable | ||||
| ``` | ||||
| 
 | ||||
| After executing the above command a file named `.gitattributes` is | ||||
| created or updated with the following content: | ||||
| 
 | ||||
| ```shell | ||||
| *.png filter=lfs diff=lfs merge=lfs -text lockable | ||||
| ``` | ||||
| 
 | ||||
| You can also register a file type as lockable without using LFS (to be able, for example, | ||||
| to lock/unlock a file you need in a remote server that | ||||
| implements the LFS File Locking API). To do that you can edit the | ||||
| `.gitattributes` file manually: | ||||
| 
 | ||||
| ```shell | ||||
| *.pdf lockable | ||||
| ``` | ||||
| 
 | ||||
| The `.gitattributes` file is key to the process and **must** | ||||
| be pushed to the remote repository for the changes to take effect. | ||||
| 
 | ||||
| After a file type has been registered as lockable, Git LFS makes | ||||
| them read-only on the file system automatically. This means you | ||||
| must **lock the file** before [editing it](#edit-lockable-files). | ||||
| 
 | ||||
| ### Lock files | ||||
| 
 | ||||
| By locking a file, you verify that no one else is editing it, and | ||||
| prevent anyone else from editing the file until you're done. On the other | ||||
| hand, when you unlock a file, you communicate that you've finished editing | ||||
| and allow other people to edit it. | ||||
| 
 | ||||
| To lock or unlock a file with Exclusive File Locking, open a terminal window | ||||
| in your repository directory and run the commands as described below. | ||||
| 
 | ||||
| To **lock** a file: | ||||
| 
 | ||||
| ```shell | ||||
| git lfs lock path/to/file.png | ||||
| ``` | ||||
| 
 | ||||
| To **unlock** a file: | ||||
| 
 | ||||
| ```shell | ||||
| git lfs unlock path/to/file.png | ||||
| ``` | ||||
| 
 | ||||
| You can also unlock by file ID (given by LFS when you [view locked files](#view-exclusively-locked-files)): | ||||
| 
 | ||||
| ```shell | ||||
| git lfs unlock --id=123 | ||||
| ``` | ||||
| 
 | ||||
| If for some reason you need to unlock a file that was not locked by | ||||
| yourself, you can use the `--force` flag as long as you have **Maintainer** | ||||
| permissions to the project: | ||||
| 
 | ||||
| ```shell | ||||
| git lfs unlock --id=123 --force | ||||
| ``` | ||||
| 
 | ||||
| You can push files to GitLab whether they're locked or unlocked. | ||||
| 
 | ||||
| NOTE: | ||||
| Although multi-branch file locks can be created and managed through the Git LFS | ||||
| command-line interface, file locks can be created for any file. | ||||
| 
 | ||||
| ### View exclusively-locked files | ||||
| 
 | ||||
| To list all the files locked with LFS locally, open a terminal window in your | ||||
| repository and run: | ||||
| 
 | ||||
| ```shell | ||||
| git lfs locks | ||||
| ``` | ||||
| 
 | ||||
| The output lists the locked files followed by the user who locked each of them | ||||
| and the files' IDs. | ||||
| 
 | ||||
| On the repository file tree, GitLab displays an LFS badge for files | ||||
| tracked by Git LFS plus a padlock icon on exclusively-locked files: | ||||
| 
 | ||||
|  | ||||
| 
 | ||||
| You can also [view and remove existing locks](#view-and-remove-existing-locks) from the GitLab UI. | ||||
| 
 | ||||
| NOTE: | ||||
| When you rename an exclusively-locked file, the lock is lost. You must | ||||
| lock it again to keep it locked. | ||||
| 
 | ||||
| ### Edit lockable files | ||||
| 
 | ||||
| After the file is [configured as lockable](#configure-exclusive-file-locks), it is set to read-only. | ||||
| Therefore, you need to lock it before editing it. | ||||
| 
 | ||||
| Suggested workflow for shared projects: | ||||
| 
 | ||||
| 1. Lock the file. | ||||
| 1. Edit the file. | ||||
| 1. Commit your changes. | ||||
| 1. Push to the repository. | ||||
| 1. Get your changes reviewed, approved, and merged. | ||||
| 1. Unlock the file. | ||||
| 
 | ||||
| ## Default branch file and directory locks | ||||
| 
 | ||||
| DETAILS: | ||||
|  | @ -234,3 +84,8 @@ This list shows all the files locked either through LFS or GitLab UI. | |||
| 
 | ||||
| Locks can be removed by their author, or any user | ||||
| with at least the Maintainer role. | ||||
| 
 | ||||
| ## Related topics | ||||
| 
 | ||||
| - [File management with Git](../../topics/git/file_management.md) | ||||
| - [File locks](../../topics/git/file_management.md#file-locks) | ||||
|  |  | |||
|  | @ -55,35 +55,8 @@ To see earlier revisions of a specific line: | |||
| 1. Select **View blame prior to this change** (**{doc-versions}**) | ||||
|    until you've found the changes you're interested in viewing. | ||||
| 
 | ||||
| ## Associated `git` command | ||||
| 
 | ||||
| If you're running `git` from the command line, the equivalent command is | ||||
| `git blame <filename>`. For example, if you want to find `blame` information | ||||
| about a `README.md` file in the local directory: | ||||
| 
 | ||||
| 1. Run this command `git blame README.md`. | ||||
| 1. If the line you want to see is not in the first page of results, press <kbd>Space</kbd> | ||||
|    until you find the line you want. | ||||
| 1. To exit out of the results, press <kbd>Q</kbd>. | ||||
| 
 | ||||
| The `git blame` output in the CLI looks like this: | ||||
| 
 | ||||
| ```shell | ||||
| 58233c4f1054c (Dan Rhodes           2022-05-13 07:02:20 +0000  1) ## Contributor License Agreement | ||||
| b87768f435185 (Jamie Hurewitz       2017-10-31 18:09:23 +0000  2) | ||||
| 8e4c7f26317ff (Brett Walker         2023-10-20 17:53:25 +0000  3) Contributions to this repository are subject to the | ||||
| 58233c4f1054c (Dan Rhodes           2022-05-13 07:02:20 +0000  4) | ||||
| ``` | ||||
| 
 | ||||
| The output includes: | ||||
| 
 | ||||
| - The SHA of the commit. | ||||
| - The name of the committer. | ||||
| - The date and time in UTC format. | ||||
| - The line number. | ||||
| - The contents of the line. | ||||
| 
 | ||||
| ## Related topics | ||||
| 
 | ||||
| - [Git file blame REST API](../../../../api/repository_files.md#get-file-blame-from-repository) | ||||
| - [Common Git commands](../../../../topics/git/commands.md) | ||||
| - [File management with Git](../../../../topics/git/file_management.md) | ||||
|  |  | |||
|  | @ -31,7 +31,7 @@ GitLab retrieves the user name and email information from the | |||
| [Git configuration](https://git-scm.com/book/en/v2/Customizing-Git-Git-Configuration) | ||||
| of the contributor when the user creates a commit. | ||||
| 
 | ||||
| ## View a file's Git history in the UI | ||||
| ## View a file's Git history | ||||
| 
 | ||||
| To see a file's Git history in the UI: | ||||
| 
 | ||||
|  | @ -40,34 +40,11 @@ To see a file's Git history in the UI: | |||
| 1. Go to your desired file in the repository. | ||||
| 1. In the upper-right corner, select **History**. | ||||
| 
 | ||||
| ## In the CLI | ||||
| 
 | ||||
| To see the history of a file from the command line, use the `git log <filename>` command. | ||||
| For example, to see `history` information about the `CONTRIBUTING.md` file in the root | ||||
| of the `gitlab` repository, run this command: | ||||
| 
 | ||||
| ```shell | ||||
| $ git log CONTRIBUTING.md | ||||
| 
 | ||||
| commit b350bf041666964c27834885e4590d90ad0bfe90 | ||||
| Author: Nick Malcolm <nmalcolm@gitlab.com> | ||||
| Date:   Fri Dec 8 13:43:07 2023 +1300 | ||||
| 
 | ||||
|     Update security contact and vulnerability disclosure info | ||||
| 
 | ||||
| commit 8e4c7f26317ff4689610bf9d031b4931aef54086 | ||||
| Author: Brett Walker <bwalker@gitlab.com> | ||||
| Date:   Fri Oct 20 17:53:25 2023 +0000 | ||||
| 
 | ||||
|     Fix link to Code of Conduct | ||||
| 
 | ||||
|     and condense some of the verbiage | ||||
| ``` | ||||
| 
 | ||||
| ## Related topics | ||||
| 
 | ||||
| - [Git blame](git_blame.md) for line-by-line information about a file | ||||
| - [Common Git commands](../../../../topics/git/commands.md) | ||||
| - [File management with Git](../../../../topics/git/file_management.md) | ||||
| 
 | ||||
| ## Troubleshooting | ||||
| 
 | ||||
|  |  | |||
|  | @ -130,6 +130,7 @@ To change the default handling of a file or file type, create a | |||
| ## Related topics | ||||
| 
 | ||||
| - [Repository files API](../../../../api/repository_files.md) | ||||
| - [File management with Git](../../../../topics/git/file_management.md) | ||||
| 
 | ||||
| ## Troubleshooting | ||||
| 
 | ||||
|  |  | |||
|  | @ -30,8 +30,9 @@ const extendConfigs = [ | |||
| // rewrite.
 | ||||
| let jhConfigs = []; | ||||
| if (existsSync(path.resolve(dirname, 'jh'))) { | ||||
|   // eslint-disable-next-line import/no-unresolved, import/extensions
 | ||||
|   jhConfigs = (await import('jh/eslint.config.js')).default; | ||||
|   const pathToJhConfig = path.resolve(dirname, 'jh/eslint.config.js'); | ||||
|   // eslint-disable-next-line import/no-dynamic-require, no-unsanitized/method
 | ||||
|   jhConfigs = (await import(pathToJhConfig)).default; | ||||
| } | ||||
| 
 | ||||
| const jestConfig = { | ||||
|  |  | |||
|  | @ -11,6 +11,17 @@ module Keeps | |||
|         @rewriter = Parser::Source::TreeRewriter.new(@source.buffer) | ||||
|       end | ||||
| 
 | ||||
|       class << self | ||||
|         # Define a node matcher method on +RuboCop::AST::Node+, which all other node types inherit from. | ||||
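|         # | ||||
|         # Hypothetical usage sketch (mirrors how Keeps::UpdateWorkersDataConsistency uses this helper): | ||||
|         # | ||||
|         #   Keeps::Helpers::FileHelper.def_node_matcher :data_consistency_node, <<~PATTERN | ||||
|         #     `(send nil? :data_consistency $(sym _) ...) | ||||
|         #   PATTERN | ||||
|         # | ||||
|         #   Keeps::Helpers::FileHelper.new('app/workers/example_worker.rb').data_consistency_node | ||||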
|         def def_node_matcher(method_name, pattern) | ||||
|           RuboCop::AST::NodePattern.new(pattern).def_node_matcher(RuboCop::AST::Node, method_name) | ||||
| 
 | ||||
|           define_method method_name do | ||||
|             source.ast.public_send(method_name) # rubocop:disable GitlabSecurity/PublicSend -- it's used to evaluate the node matcher at instance level | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       def replace_method_content(method_name, content, strip_comments_from_file: false) | ||||
|         method = source.ast.each_node(:class).first.each_node(:def).find do |child| | ||||
|           child.method_name == method_name.to_sym | ||||
|  | @ -25,6 +36,12 @@ module Keeps | |||
|         process | ||||
|       end | ||||
| 
 | ||||
|       # Replace the source range of +node+ with +content+ and return the rewritten file content. | ||||
|       def replace_as_string(node, content) | ||||
|         rewriter.replace(node.loc.expression, content) | ||||
| 
 | ||||
|         process | ||||
|       end | ||||
| 
 | ||||
|       private | ||||
| 
 | ||||
|       attr_reader :file, :source, :rewriter, :corrector | ||||
|  | @ -52,7 +69,7 @@ module Keeps | |||
|       end | ||||
| 
 | ||||
|       def process | ||||
|         @process ||= rewriter.process.lstrip.gsub(/\n{3,}/, "\n\n") | ||||
|         rewriter.process.lstrip.gsub(/\n{3,}/, "\n\n") | ||||
|       end | ||||
|     end | ||||
|   end | ||||
|  |  | |||
|  | @ -0,0 +1,135 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require_relative '../rubocop/cop_todo' | ||||
| 
 | ||||
| module Keeps | ||||
|   # This is an implementation of ::Gitlab::Housekeeper::Keep. | ||||
|   # This changes workers which have `data_consistency: :always` to `:sticky`. | ||||
|   # | ||||
|   # You can run it individually with: | ||||
|   # | ||||
|   # ``` | ||||
|   # bundle exec gitlab-housekeeper -d -k Keeps::UpdateWorkersDataConsistency | ||||
|   # ``` | ||||
|   class UpdateWorkersDataConsistency < ::Gitlab::Housekeeper::Keep | ||||
|     WORKER_REGEX = %r{app/workers/(.+).rb} | ||||
|     WORKERS_DATA_CONSISTENCY_PATH = '.rubocop_todo/sidekiq_load_balancing/worker_data_consistency.yml' | ||||
|     FALLBACK_FEATURE_CATEGORY = 'database' | ||||
|     LIMIT_TO = 5 | ||||
| 
 | ||||
|     def initialize(...) | ||||
|       ::Keeps::Helpers::FileHelper.def_node_matcher :data_consistency_node, <<~PATTERN | ||||
|           `(send nil? :data_consistency $(sym _) ...) | ||||
|       PATTERN | ||||
| 
 | ||||
|       super | ||||
|     end | ||||
| 
 | ||||
|     def each_change | ||||
|       workers_by_feature_category.deep_dup.each do |feature_category, workers| | ||||
|         remove_workers_from_list(workers.pluck(:path)) # rubocop:disable CodeReuse/ActiveRecord -- small dataset | ||||
| 
 | ||||
|         workers.each do |worker| | ||||
|           file_helper = ::Keeps::Helpers::FileHelper.new(worker[:path]) | ||||
|           node = file_helper.data_consistency_node | ||||
|           File.write(worker[:path], file_helper.replace_as_string(node, ':sticky')) | ||||
|         end | ||||
| 
 | ||||
|         yield(build_change(feature_category, workers)) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     private | ||||
| 
 | ||||
|     def workers_by_feature_category | ||||
|       worker_paths.each_with_object(Hash.new { |h, k| h[k] = [] }) do |worker_path, group| | ||||
|         next unless File.read(worker_path, mode: 'rb').include?('data_consistency :always') | ||||
| 
 | ||||
|         worker_name = worker_path.match(WORKER_REGEX)[1].camelize | ||||
| 
 | ||||
|         feature_category = worker_feature_category(worker_name) | ||||
| 
 | ||||
|         next if group[feature_category].size >= LIMIT_TO | ||||
| 
 | ||||
|         group[feature_category] << { path: worker_path, name: worker_name } | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def build_change(feature_category, workers) | ||||
|       change = ::Gitlab::Housekeeper::Change.new | ||||
|       change.title = "Change data consistency for workers maintained by #{feature_category}".truncate(70, omission: '') | ||||
|       change.identifiers = workers.map { |worker| worker[:name].to_s }.prepend(feature_category) | ||||
|       change.labels = labels(feature_category) | ||||
|       change.reviewers = pick_reviewers(feature_category, change.identifiers) | ||||
|       change.changed_files = workers.pluck(:path).prepend(WORKERS_DATA_CONSISTENCY_PATH) # rubocop:disable CodeReuse/ActiveRecord -- small dataset | ||||
| 
 | ||||
|       change.description = <<~MARKDOWN.chomp | ||||
|         ## What does this MR do? | ||||
| 
 | ||||
|         It updates the `data_consistency` of workers maintained by `#{feature_category}` from `:always` to `:sticky`, | ||||
|         as a way to reduce database reads on the primary DB. Check https://gitlab.com/gitlab-org/gitlab/-/issues/462611. | ||||
| 
 | ||||
|         To reduce resource saturation on the primary node, all workers should be changed to `sticky`, at minimum. | ||||
| 
 | ||||
|         Since jobs are now enqueued along with the current database LSN, the replica (for `:sticky` or `:delayed`) | ||||
|         is guaranteed to be caught up to that point; otherwise the job is retried or falls back to the primary. Consider | ||||
|         updating the worker(s) to `:delayed` where applicable. | ||||
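|  | ||||
|         For illustration only (`SomeWorker` is a placeholder name), the change applied to each worker is a sketch along these lines: | ||||
|  | ||||
|         ```ruby | ||||
|         class SomeWorker | ||||
|           include ApplicationWorker | ||||
|  | ||||
|           # Before: `data_consistency :always` forced every read onto the primary database. | ||||
|           # After: jobs may read from a replica once it has caught up to the enqueue-time LSN. | ||||
|           data_consistency :sticky | ||||
|         end | ||||
|         ``` | ||||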
| 
 | ||||
|         You can read more about the Sidekiq Workers `data_consistency` in | ||||
|         https://docs.gitlab.com/ee/development/sidekiq/worker_attributes.html#job-data-consistency-strategies. | ||||
| 
 | ||||
|         You can use this [dashboard](https://log.gprd.gitlab.net/app/r/s/iyIUV) to monitor the worker query activity on | ||||
|         primary vs. replicas. | ||||
| 
 | ||||
|         Currently, the `gitlab-housekeeper` is not always capable of updating all references, so you must check the diff | ||||
|         and pipeline failures to confirm whether there are any issues. | ||||
|       MARKDOWN | ||||
| 
 | ||||
|       change | ||||
|     end | ||||
| 
 | ||||
|     def labels(feature_category) | ||||
|       group_labels = groups_helper.labels_for_feature_category(feature_category) | ||||
| 
 | ||||
|       group_labels + %w[maintenance::scalability type::maintenance severity::3 priority::1] | ||||
|     end | ||||
| 
 | ||||
|     def pick_reviewers(feature_category, identifiers) | ||||
|       groups_helper.pick_reviewer_for_feature_category( | ||||
|         feature_category, | ||||
|         identifiers, | ||||
|         fallback_feature_category: 'database' | ||||
|       ) | ||||
|     end | ||||
| 
 | ||||
|     def worker_feature_category(worker_name) | ||||
|       feature_category = workers_meta.find { |entry| entry[:worker_name].to_s == worker_name.to_s } || {} | ||||
| 
 | ||||
|       feature_category.fetch(:feature_category, FALLBACK_FEATURE_CATEGORY).to_s | ||||
|     end | ||||
| 
 | ||||
|     def workers_meta | ||||
|       @workers_meta ||= Gitlab::SidekiqConfig::QUEUE_CONFIG_PATHS.flat_map do |yaml_file| | ||||
|         YAML.safe_load_file(yaml_file, permitted_classes: [Symbol]) | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     def remove_workers_from_list(paths_to_remove) | ||||
|       todo_helper = RuboCop::CopTodo.new('SidekiqLoadBalancing/WorkerDataConsistency') | ||||
|       todo_helper.add_files(worker_paths - paths_to_remove) | ||||
| 
 | ||||
|       File.write(WORKERS_DATA_CONSISTENCY_PATH, todo_helper.to_yaml) | ||||
|     end | ||||
| 
 | ||||
|     def worker_paths | ||||
|       @worker_paths ||= YAML.safe_load_file(WORKERS_DATA_CONSISTENCY_PATH).dig( | ||||
|         'SidekiqLoadBalancing/WorkerDataConsistency', | ||||
|         'Exclude' | ||||
|       ) | ||||
|     end | ||||
| 
 | ||||
|     def groups_helper | ||||
|       @groups_helper ||= ::Keeps::Helpers::Groups.new | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -6,7 +6,7 @@ module API | |||
| 
 | ||||
|     before { authenticate! } | ||||
| 
 | ||||
|     feature_category :team_planning | ||||
|     feature_category :notifications | ||||
|     urgency :low | ||||
| 
 | ||||
|     ISSUABLE_TYPES = { | ||||
|  |  | |||
|  | @ -23,6 +23,12 @@ module Gitlab | |||
|             end | ||||
|           end | ||||
| 
 | ||||
|           def variables_hash_expanded | ||||
|             strong_memoize(:variables_hash_expanded) do | ||||
|               variables.sort_and_expand_all.to_hash | ||||
|             end | ||||
|           end | ||||
| 
 | ||||
|           def project | ||||
|             pipeline.project | ||||
|           end | ||||
|  |  | |||
|  | @ -17,7 +17,7 @@ module Gitlab | |||
|           return true unless modified_paths | ||||
|           return false if modified_paths.empty? | ||||
| 
 | ||||
|           expanded_globs = expand_globs(context).uniq | ||||
|           expanded_globs = expand_globs(context, pipeline).uniq | ||||
|           return false if expanded_globs.empty? | ||||
| 
 | ||||
|           cache_key = [ | ||||
|  | @ -43,11 +43,17 @@ module Gitlab | |||
|           end | ||||
|         end | ||||
| 
 | ||||
|         def expand_globs(context) | ||||
|         def expand_globs(context, pipeline) | ||||
|           return paths unless context | ||||
| 
 | ||||
|           paths.map do |glob| | ||||
|             ExpandVariables.expand_existing(glob, -> { context.variables_hash }) | ||||
|           if Feature.enabled?(:expand_nested_variables_in_job_rules_exists_and_changes, pipeline.project) | ||||
|             paths.map do |glob| | ||||
|               expand_value_nested(glob, context) | ||||
|             end | ||||
|           else | ||||
|             paths.map do |glob| | ||||
|               expand_value(glob, context) | ||||
|             end | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|  | @ -70,12 +76,25 @@ module Gitlab | |||
|         def find_compare_to_sha(pipeline, context) | ||||
|           return unless @globs.include?(:compare_to) | ||||
| 
 | ||||
|           compare_to = ExpandVariables.expand(@globs[:compare_to], -> { context.variables_hash }) | ||||
|           compare_to = if Feature.enabled?(:expand_nested_variables_in_job_rules_exists_and_changes, pipeline.project) | ||||
|                          expand_value_nested(@globs[:compare_to], context) | ||||
|                        else | ||||
|                          expand_value(@globs[:compare_to], context) | ||||
|                        end | ||||
| 
 | ||||
|           commit = pipeline.project.commit(compare_to) | ||||
|           raise Rules::Rule::Clause::ParseError, 'rules:changes:compare_to is not a valid ref' unless commit | ||||
| 
 | ||||
|           commit.sha | ||||
|         end | ||||
| 
 | ||||
|         def expand_value(value, context) | ||||
|           ExpandVariables.expand_existing(value, -> { context.variables_hash }) | ||||
|         end | ||||
| 
 | ||||
|         def expand_value_nested(value, context) | ||||
|           ExpandVariables.expand_existing(value, -> { context.variables_hash_expanded }) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|   end | ||||
|  |  | |||
|  | @ -18,13 +18,13 @@ module Gitlab | |||
|           @ref = clause[:ref] | ||||
|         end | ||||
| 
 | ||||
|         def satisfied_by?(_pipeline, context) | ||||
|         def satisfied_by?(pipeline, context) | ||||
|           # Return early to avoid redundant Gitaly calls | ||||
|           return false unless @globs.any? | ||||
| 
 | ||||
|           context = change_context(context) if @project_path | ||||
|           context = change_context(context, pipeline) if @project_path | ||||
| 
 | ||||
|           expanded_globs = expand_globs(context) | ||||
|           expanded_globs = expand_globs(context, pipeline) | ||||
|           top_level_only = expanded_globs.all?(&method(:top_level_glob?)) | ||||
| 
 | ||||
|           paths = worktree_paths(context, top_level_only) | ||||
|  | @ -42,9 +42,15 @@ module Gitlab | |||
|           grouped.values_at(:exact, :extension, :pattern).map { |globs| Array(globs) } | ||||
|         end | ||||
| 
 | ||||
|         def expand_globs(context) | ||||
|           @globs.map do |glob| | ||||
|             expand_value(glob, context) | ||||
|         def expand_globs(context, pipeline) | ||||
|           if Feature.enabled?(:expand_nested_variables_in_job_rules_exists_and_changes, pipeline&.project) | ||||
|             @globs.map do |glob| | ||||
|               expand_value_nested(glob, context) | ||||
|             end | ||||
|           else | ||||
|             @globs.map do |glob| | ||||
|               expand_value(glob, context) | ||||
|             end | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
|  | @ -121,10 +127,10 @@ module Gitlab | |||
|           glob.delete_prefix(WILDCARD_NESTED_PATTERN) | ||||
|         end | ||||
| 
 | ||||
|         def change_context(old_context) | ||||
|         def change_context(old_context, pipeline) | ||||
|           user = find_context_user(old_context) | ||||
|           new_project = find_context_project(user, old_context) | ||||
|           new_sha = find_context_sha(new_project, old_context) | ||||
|           new_project = find_context_project(user, old_context, pipeline) | ||||
|           new_sha = find_context_sha(new_project, old_context, pipeline) | ||||
| 
 | ||||
|           Gitlab::Ci::Config::External::Context.new( | ||||
|             project: new_project, | ||||
|  | @ -138,8 +144,13 @@ module Gitlab | |||
|           context.is_a?(Gitlab::Ci::Config::External::Context) ? context.user : context.pipeline.user | ||||
|         end | ||||
| 
 | ||||
|         def find_context_project(user, context) | ||||
|           full_path = expand_value(@project_path, context) | ||||
|         def find_context_project(user, context, pipeline) | ||||
|           full_path = if Feature.enabled?(:expand_nested_variables_in_job_rules_exists_and_changes, pipeline.project) | ||||
|                         expand_value_nested(@project_path, context) | ||||
|                       else | ||||
|                         expand_value(@project_path, context) | ||||
|                       end | ||||
| 
 | ||||
|           project = Project.find_by_full_path(full_path) | ||||
| 
 | ||||
|           unless project | ||||
|  | @ -156,10 +167,16 @@ module Gitlab | |||
|           project | ||||
|         end | ||||
| 
 | ||||
|         def find_context_sha(project, context) | ||||
|         def find_context_sha(project, context, pipeline) | ||||
|           return project.commit&.sha unless @ref | ||||
| 
 | ||||
|           ref = expand_value(@ref, context) | ||||
|           ref = if Feature.enabled?(:expand_nested_variables_in_job_rules_exists_and_changes, | ||||
|             pipeline.project) | ||||
|                   expand_value_nested(@ref, context) | ||||
|                 else | ||||
|                   expand_value(@ref, context) | ||||
|                 end | ||||
| 
 | ||||
|           commit = project.commit(ref) | ||||
| 
 | ||||
|           unless commit | ||||
|  | @ -184,6 +201,10 @@ module Gitlab | |||
|         def expand_value(value, context) | ||||
|           ExpandVariables.expand_existing(value, -> { context.variables_hash }) | ||||
|         end | ||||
| 
 | ||||
|         def expand_value_nested(value, context) | ||||
|           ExpandVariables.expand_existing(value, -> { context.variables_hash_expanded }) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|   end | ||||
|  |  | |||
|  | @ -61,6 +61,12 @@ module Gitlab | |||
|             end | ||||
|           end | ||||
| 
 | ||||
|           def variables_hash_expanded | ||||
|             strong_memoize(:variables_hash_expanded) do | ||||
|               variables.sort_and_expand_all.to_hash | ||||
|             end | ||||
|           end | ||||
| 
 | ||||
|           def mutate(attrs = {}) | ||||
|             self.class.new(**attrs) do |ctx| | ||||
|               ctx.pipeline = pipeline | ||||
|  |  | |||
|  | @ -57513,6 +57513,12 @@ msgid_plural "Todos|Marked %d to-dos as done" | |||
| msgstr[0] "" | ||||
| msgstr[1] "" | ||||
| 
 | ||||
| msgid "Todos|Marked as done" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "Todos|Marked as undone" | ||||
| msgstr "" | ||||
| 
 | ||||
| msgid "Todos|Member access request" | ||||
| msgstr "" | ||||
| 
 | ||||
|  |  | |||
|  | @ -145,6 +145,7 @@ class ApplicationSettingsAnalysis | |||
|       help_page_support_url | ||||
|       help_page_text | ||||
|       home_page_url | ||||
|       identity_verification_settings | ||||
|       import_sources | ||||
|       importers | ||||
|       invisible_captcha_enabled | ||||
|  | @ -217,6 +218,7 @@ class ApplicationSettingsAnalysis | |||
|       shared_runners_minutes | ||||
|       shared_runners_text | ||||
|       sidekiq_job_limiter_limit_bytes | ||||
|       sign_in_restrictions | ||||
|       signup_enabled | ||||
|       silent_admin_exports_enabled | ||||
|       slack_app_enabled | ||||
|  |  | |||
|  | @ -612,47 +612,6 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte | |||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe 'GET dag' do | ||||
|     let(:pipeline) { create(:ci_pipeline, project: project) } | ||||
| 
 | ||||
|     it_behaves_like 'the show page', 'dag' | ||||
|   end | ||||
| 
 | ||||
|   describe 'GET dag.json' do | ||||
|     let(:pipeline) { create(:ci_pipeline, project: project) } | ||||
|     let(:build_stage) { create(:ci_stage, name: 'build', pipeline: pipeline) } | ||||
|     let(:test_stage) { create(:ci_stage, name: 'test', pipeline: pipeline) } | ||||
| 
 | ||||
|     before do | ||||
|       create_build(build_stage, 1, 'build') | ||||
|       create_build(test_stage, 2, 'test', scheduling_type: 'dag').tap do |job| | ||||
|         create(:ci_build_need, build: job, name: 'build') | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     it 'returns the pipeline with DAG serialization' do | ||||
|       get :dag, params: { namespace_id: project.namespace, project_id: project, id: pipeline }, format: :json | ||||
| 
 | ||||
|       expect(response).to have_gitlab_http_status(:ok) | ||||
| 
 | ||||
|       expect(json_response.fetch('stages')).not_to be_empty | ||||
| 
 | ||||
|       build_stage = json_response['stages'].first | ||||
|       expect(build_stage.fetch('name')).to eq 'build' | ||||
|       expect(build_stage.fetch('groups').first.fetch('jobs')) | ||||
|         .to eq [{ 'name' => 'build', 'scheduling_type' => 'stage' }] | ||||
| 
 | ||||
|       test_stage = json_response['stages'].last | ||||
|       expect(test_stage.fetch('name')).to eq 'test' | ||||
|       expect(test_stage.fetch('groups').first.fetch('jobs')) | ||||
|         .to eq [{ 'name' => 'test', 'scheduling_type' => 'dag', 'needs' => ['build'] }] | ||||
|     end | ||||
| 
 | ||||
|     def create_build(stage, stage_idx, name, params = {}) | ||||
|       create(:ci_build, pipeline: pipeline, ci_stage: stage, stage_idx: stage_idx, name: name, **params) | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe 'GET builds' do | ||||
|     let(:pipeline) { create(:ci_pipeline, project: project) } | ||||
| 
 | ||||
|  |  | |||
|  | @ -4,7 +4,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe 'Dashboard > User filters todos', :js, feature_category: :team_planning do | ||||
| RSpec.describe 'Dashboard > User filters todos', :js, feature_category: :notifications do | ||||
|   let(:user_1)    { create(:user, username: 'user_1', name: 'user_1') } | ||||
|   let(:user_2)    { create(:user, username: 'user_2', name: 'user_2') } | ||||
| 
 | ||||
|  |  | |||
|  | @ -4,7 +4,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe 'Dashboard > User sorts todos', feature_category: :team_planning do | ||||
| RSpec.describe 'Dashboard > User sorts todos', feature_category: :notifications do | ||||
|   let(:user)    { create(:user) } | ||||
|   let(:project) { create(:project) } | ||||
| 
 | ||||
|  |  | |||
|  | @ -4,7 +4,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe 'Dashboard Todos', :js, feature_category: :team_planning do | ||||
| RSpec.describe 'Dashboard Todos', :js, feature_category: :notifications do | ||||
|   include DesignManagementTestHelpers | ||||
| 
 | ||||
|   let_it_be(:user) { create(:user, username: 'john') } | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe 'Manually create a todo item from issue', :js, feature_category: :team_planning do | ||||
| RSpec.describe 'Manually create a todo item from issue', :js, feature_category: :notifications do | ||||
|   let!(:project) { create(:project) } | ||||
|   let!(:issue)   { create(:issue, project: project) } | ||||
|   let!(:user)    { create(:user) } | ||||
|  |  | |||
|  | @ -168,15 +168,6 @@ RSpec.describe 'Project active tab', :js, feature_category: :groups_and_projects | |||
|         it_behaves_like 'page has active sub tab', _('Pipelines') | ||||
|       end | ||||
| 
 | ||||
|       context 'Needs tab' do | ||||
|         before do | ||||
|           visit dag_project_pipeline_path(project, pipeline) | ||||
|         end | ||||
| 
 | ||||
|         it_behaves_like 'page has active tab', _('Build') | ||||
|         it_behaves_like 'page has active sub tab', _('Pipelines') | ||||
|       end | ||||
| 
 | ||||
|       context 'Builds tab' do | ||||
|         before do | ||||
|           visit builds_project_pipeline_path(project, pipeline) | ||||
|  |  | |||
|  | @ -1220,25 +1220,6 @@ RSpec.describe 'Pipeline', :js, feature_category: :continuous_integration do | |||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe 'GET /:project/-/pipelines/:id/dag' do | ||||
|     include_context 'pipeline builds' | ||||
| 
 | ||||
|     let_it_be(:project) { create(:project, :repository) } | ||||
| 
 | ||||
|     let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master', sha: project.commit.id) } | ||||
| 
 | ||||
|     before do | ||||
|       visit dag_project_pipeline_path(project, pipeline) | ||||
|     end | ||||
| 
 | ||||
|     context 'page tabs' do | ||||
|       it 'shows Pipeline and Jobs tabs with link' do | ||||
|         expect(page).to have_link('Pipeline') | ||||
|         expect(page).to have_link('Jobs') | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   context 'when user sees pipeline flags in a pipeline detail page' do | ||||
|     let_it_be(:project) { create(:project, :repository) } | ||||
| 
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe TodosFinder, feature_category: :team_planning do | ||||
| RSpec.describe TodosFinder, feature_category: :notifications do | ||||
|   describe '#execute' do | ||||
|     let_it_be(:user) { create(:user) } | ||||
|     let_it_be(:group) { create(:group) } | ||||
|  |  | |||
|  | @ -0,0 +1,63 @@ | |||
| import { shallowMount } from '@vue/test-utils'; | ||||
| import { GlButton } from '@gitlab/ui'; | ||||
| import TodoItemActions from '~/todos/components/todo_item_actions.vue'; | ||||
| import { TODO_STATE_DONE, TODO_STATE_PENDING } from '~/todos/constants'; | ||||
| 
 | ||||
| describe('TodoItemActions', () => { | ||||
|   let wrapper; | ||||
|   const mockTodo = { | ||||
|     id: 'gid://gitlab/Todo/1', | ||||
|     state: TODO_STATE_PENDING, | ||||
|   }; | ||||
| 
 | ||||
|   const createComponent = (props = {}) => { | ||||
|     wrapper = shallowMount(TodoItemActions, { | ||||
|       propsData: { | ||||
|         todo: mockTodo, | ||||
|         ...props, | ||||
|       }, | ||||
|       provide: { | ||||
|         currentTab: 0, | ||||
|       }, | ||||
|     }); | ||||
|   }; | ||||
| 
 | ||||
|   it('sets correct icon for pending todo action button', () => { | ||||
|     createComponent(); | ||||
|     expect(wrapper.findComponent(GlButton).props('icon')).toBe('check'); | ||||
|   }); | ||||
| 
 | ||||
|   it('sets correct icon for done todo action button', () => { | ||||
|     createComponent({ todo: { ...mockTodo, state: TODO_STATE_DONE } }); | ||||
|     expect(wrapper.findComponent(GlButton).props('icon')).toBe('redo'); | ||||
|   }); | ||||
| 
 | ||||
|   it('sets correct aria-label for pending todo', () => { | ||||
|     createComponent(); | ||||
|     expect(wrapper.findComponent(GlButton).attributes('aria-label')).toBe('Mark as done'); | ||||
|   }); | ||||
| 
 | ||||
|   it('sets correct aria-label for done todo', () => { | ||||
|     createComponent({ todo: { ...mockTodo, state: TODO_STATE_DONE } }); | ||||
|     expect(wrapper.findComponent(GlButton).attributes('aria-label')).toBe('Undo'); | ||||
|   }); | ||||
| 
 | ||||
|   describe('tooltipTitle', () => { | ||||
|     it('returns null when isLoading is true', () => { | ||||
|       createComponent(); | ||||
|       // eslint-disable-next-line no-restricted-syntax
 | ||||
|       wrapper.setData({ isLoading: true }); | ||||
|       expect(wrapper.vm.tooltipTitle).toBeNull(); | ||||
|     }); | ||||
| 
 | ||||
|     it('returns "Mark as done" for pending todo', () => { | ||||
|       createComponent(); | ||||
|       expect(wrapper.vm.tooltipTitle).toBe('Mark as done'); | ||||
|     }); | ||||
| 
 | ||||
|     it('returns "Undo" for done todo', () => { | ||||
|       createComponent({ todo: { ...mockTodo, state: TODO_STATE_DONE } }); | ||||
|       expect(wrapper.vm.tooltipTitle).toBe('Undo'); | ||||
|     }); | ||||
|   }); | ||||
| }); | ||||
|  | @ -1,4 +1,3 @@ | |||
| // write jest specs in this file, for the component in the todo_item_body.vue file
 | ||||
| import { shallowMount } from '@vue/test-utils'; | ||||
| import { GlLink, GlAvatar, GlAvatarLink } from '@gitlab/ui'; | ||||
| import TodoItemBody from '~/todos/components/todo_item_body.vue'; | ||||
|  |  | |||
|  | @ -0,0 +1,70 @@ | |||
| import { shallowMount } from '@vue/test-utils'; | ||||
| import TodoItem from '~/todos/components/todo_item.vue'; | ||||
| import TodoItemTitle from '~/todos/components/todo_item_title.vue'; | ||||
| import TodoItemBody from '~/todos/components/todo_item_body.vue'; | ||||
| import TodoItemTimestamp from '~/todos/components/todo_item_timestamp.vue'; | ||||
| import TodoItemActions from '~/todos/components/todo_item_actions.vue'; | ||||
| import { TODO_STATE_DONE, TODO_STATE_PENDING } from '~/todos/constants'; | ||||
| 
 | ||||
| describe('TodoItem', () => { | ||||
|   let wrapper; | ||||
| 
 | ||||
|   const createComponent = (props = {}) => { | ||||
|     wrapper = shallowMount(TodoItem, { | ||||
|       propsData: { | ||||
|         currentUserId: '1', | ||||
|         todo: { | ||||
|           id: '1', | ||||
|           state: TODO_STATE_PENDING, | ||||
|           targetType: 'Issue', | ||||
|           targetUrl: '/project/issue/1', | ||||
|         }, | ||||
|         ...props, | ||||
|       }, | ||||
|     }); | ||||
|   }; | ||||
| 
 | ||||
|   it('renders the component', () => { | ||||
|     createComponent(); | ||||
|     expect(wrapper.exists()).toBe(true); | ||||
|   }); | ||||
| 
 | ||||
|   it('renders TodoItemTitle component', () => { | ||||
|     createComponent(); | ||||
|     expect(wrapper.findComponent(TodoItemTitle).exists()).toBe(true); | ||||
|   }); | ||||
| 
 | ||||
|   it('renders TodoItemBody component', () => { | ||||
|     createComponent(); | ||||
|     expect(wrapper.findComponent(TodoItemBody).exists()).toBe(true); | ||||
|   }); | ||||
| 
 | ||||
|   it('renders TodoItemTimestamp component', () => { | ||||
|     createComponent(); | ||||
|     expect(wrapper.findComponent(TodoItemTimestamp).exists()).toBe(true); | ||||
|   }); | ||||
| 
 | ||||
|   it('renders TodoItemActions component', () => { | ||||
|     createComponent(); | ||||
|     expect(wrapper.findComponent(TodoItemActions).exists()).toBe(true); | ||||
|   }); | ||||
| 
 | ||||
|   describe('computed properties', () => { | ||||
|     it('isDone returns true when todo state is done', () => { | ||||
|       createComponent({ todo: { state: TODO_STATE_DONE } }); | ||||
|       expect(wrapper.vm.isDone).toBe(true); | ||||
|     }); | ||||
| 
 | ||||
|     it('isPending returns true when todo state is pending', () => { | ||||
|       createComponent({ todo: { state: TODO_STATE_PENDING } }); | ||||
|       expect(wrapper.vm.isPending).toBe(true); | ||||
|     }); | ||||
|   }); | ||||
| 
 | ||||
|   it('emits change event when TodoItemActions emits change', async () => { | ||||
|     createComponent(); | ||||
|     const todoItemActions = wrapper.findComponent(TodoItemActions); | ||||
|     await todoItemActions.vm.$emit('change', '1', true); | ||||
|     expect(wrapper.emitted('change')).toEqual([['1', true]]); | ||||
|   }); | ||||
| }); | ||||
|  | @ -2,7 +2,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Resolvers::TodosResolver, feature_category: :team_planning do | ||||
| RSpec.describe Resolvers::TodosResolver, feature_category: :notifications do | ||||
|   include GraphqlHelpers | ||||
|   include DesignManagementTestHelpers | ||||
| 
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe GitlabSchema.types['Todo'], feature_category: :team_planning do | ||||
| RSpec.describe GitlabSchema.types['Todo'], feature_category: :notifications do | ||||
|   let_it_be(:current_user) { create(:user) } | ||||
|   let_it_be(:author) { create(:user) } | ||||
| 
 | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Types::TodoableInterface, feature_category: :team_planning do | ||||
| RSpec.describe Types::TodoableInterface, feature_category: :notifications do | ||||
|   include GraphqlHelpers | ||||
| 
 | ||||
|   it 'exposes the expected fields' do | ||||
|  |  | |||
|  | @ -137,4 +137,38 @@ RSpec.describe Keeps::Helpers::FileHelper, feature_category: :tooling do | |||
|       end | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe '#replace_as_string' do | ||||
|     let(:filename) { 'file.txt' } | ||||
|     let(:new_milestone) { '17.5' } | ||||
|     let(:parsed_file) do | ||||
|       <<~RUBY | ||||
|         # Migration type +class+ | ||||
|         # frozen_string_literal: true | ||||
| 
 | ||||
|         # See https://docs.gitlab.com/ee/development/migration_style_guide.html | ||||
|         # for more information on how to write migrations for GitLab. | ||||
| 
 | ||||
|         =begin | ||||
|           This migration adds | ||||
|           a new column to project | ||||
|         =end | ||||
|         class AddColToProjects < Gitlab::Database::Migration[2.2] | ||||
|           milestone #{new_milestone} # Inline comment | ||||
| 
 | ||||
|           def change | ||||
|             add_column :projects, :bool_col, :boolean, default: false, null: false # adds a new column | ||||
|           end | ||||
|         end# Another inline comment | ||||
|       RUBY | ||||
|     end | ||||
| 
 | ||||
|     before do | ||||
|       described_class.def_node_matcher(:milestone_node, '`(send nil? :milestone $(str _) ...)') | ||||
|     end | ||||
| 
 | ||||
|     it 'parses the file as expected' do | ||||
|       expect(helper.replace_as_string(helper.milestone_node, new_milestone)).to eq(parsed_file) | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  |  | |||
|  | @ -168,4 +168,12 @@ RSpec.describe Gitlab::Ci::Build::Context::Build, feature_category: :pipeline_co | |||
| 
 | ||||
|     it_behaves_like 'variables collection' | ||||
|   end | ||||
| 
 | ||||
|   describe '#variables_hash_expanded' do | ||||
|     subject { context.variables_hash_expanded } | ||||
| 
 | ||||
|     it { expect(context.variables_hash_expanded).to be_instance_of(ActiveSupport::HashWithIndifferentAccess) } | ||||
| 
 | ||||
|     it_behaves_like 'variables collection' | ||||
|   end | ||||
| end | ||||
|  |  | |||
|  | @ -158,10 +158,35 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes, feature_category | |||
|         end | ||||
| 
 | ||||
|         before do | ||||
|           allow(context).to receive(:variables_hash).and_return(variables_hash) | ||||
|           allow(context).to receive(:variables_hash_expanded).and_return(variables_hash) | ||||
|         end | ||||
| 
 | ||||
|         it { is_expected.to be_truthy } | ||||
| 
 | ||||
|         context 'when the variable is nested' do | ||||
|           let(:variables_hash) do | ||||
|             { 'HELM_DIR' => 'he$SUFFIX', 'SUFFIX' => 'lm' } | ||||
|           end | ||||
| 
 | ||||
|           let(:variables_hash_expanded) do | ||||
|             { 'HELM_DIR' => 'helm', 'SUFFIX' => 'lm' } | ||||
|           end | ||||
| 
 | ||||
|           before do | ||||
|             allow(context).to receive(:variables_hash_expanded).and_return(variables_hash_expanded) | ||||
|           end | ||||
| 
 | ||||
|           it { is_expected.to be_truthy } | ||||
| 
 | ||||
|           context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|             before do | ||||
|               stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|               allow(context).to receive(:variables_hash).and_return(variables_hash) | ||||
|             end | ||||
| 
 | ||||
|             it { is_expected.to be_falsey } | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when variable expansion does not match' do | ||||
|  | @ -169,7 +194,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes, feature_category | |||
|         let(:modified_paths) { ['path/with/$in/it/file.txt'] } | ||||
| 
 | ||||
|         before do | ||||
|           allow(context).to receive(:variables_hash).and_return({}) | ||||
|           allow(context).to receive(:variables_hash_expanded).and_return({}) | ||||
|         end | ||||
| 
 | ||||
|         it { is_expected.to be_truthy } | ||||
|  | @ -263,10 +288,54 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes, feature_category | |||
|         let(:pipeline) { build(:ci_pipeline, project: project, ref: 'feature_2', sha: project.commit('feature_2').sha) } | ||||
| 
 | ||||
|         before do | ||||
|           allow(context).to receive(:variables_hash).and_return(variables_hash) | ||||
|           allow(context).to receive(:variables_hash_expanded).and_return(variables_hash) | ||||
|         end | ||||
| 
 | ||||
|         it { is_expected.to be_truthy } | ||||
| 
 | ||||
|         context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|           before do | ||||
|             stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|             allow(context).to receive(:variables_hash).and_return(variables_hash) | ||||
|           end | ||||
| 
 | ||||
|           it { is_expected.to be_truthy } | ||||
|         end | ||||
| 
 | ||||
|         context 'when the variable is nested' do | ||||
|           let(:context) { instance_double(Gitlab::Ci::Build::Context::Base) } | ||||
|           let(:variables_hash) do | ||||
|             { 'FEATURE_BRANCH_NAME_PREFIX' => 'feature_', 'NESTED_REF_VAR' => '${FEATURE_BRANCH_NAME_PREFIX}1' } | ||||
|           end | ||||
| 
 | ||||
|           let(:variables_hash_expanded) do | ||||
|             { 'FEATURE_BRANCH_NAME_PREFIX' => 'feature_', 'NESTED_REF_VAR' => 'feature_1' } | ||||
|           end | ||||
| 
 | ||||
|           let(:globs) { { paths: ['file2.txt'], compare_to: '$NESTED_REF_VAR' } } | ||||
|           let(:pipeline) do | ||||
|             build(:ci_pipeline, project: project, ref: 'feature_2', sha: project.commit('feature_2').sha) | ||||
|           end | ||||
| 
 | ||||
|           before do | ||||
|             allow(context).to receive(:variables_hash_expanded).and_return(variables_hash_expanded) | ||||
|           end | ||||
| 
 | ||||
|           it { is_expected.to be_truthy } | ||||
| 
 | ||||
|           context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|             before do | ||||
|               stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|               allow(context).to receive(:variables_hash).and_return(variables_hash) | ||||
|             end | ||||
| 
 | ||||
|             it 'raises ParseError' do | ||||
|               expect { satisfied_by }.to raise_error( | ||||
|                 ::Gitlab::Ci::Build::Rules::Rule::Clause::ParseError, 'rules:changes:compare_to is not a valid ref' | ||||
|               ) | ||||
|             end | ||||
|           end | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|   end | ||||
|  |  | |||
|  | @ -6,6 +6,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists, feature_category: | |||
|   let_it_be(:user) { create(:user) } | ||||
|   let_it_be(:project) { create(:project, :small_repo, files: { 'subdir/my_file.txt' => '' }) } | ||||
|   let_it_be(:other_project) { create(:project, :small_repo, files: { 'file.txt' => '' }) } | ||||
|   let(:pipeline) { instance_double(Ci::Pipeline, project: project, sha: 'sha', user: user) } | ||||
| 
 | ||||
|   let(:variables) do | ||||
|     Gitlab::Ci::Variables::Collection.new([ | ||||
|  | @ -13,6 +14,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists, feature_category: | |||
|       { key: 'FILE_TXT', value: 'file.txt' }, | ||||
|       { key: 'FULL_PATH_VALID', value: 'subdir/my_file.txt' }, | ||||
|       { key: 'FULL_PATH_INVALID', value: 'subdir/does_not_exist.txt' }, | ||||
|       { key: 'NESTED_FULL_PATH_VALID', value: '$SUBDIR/my_file.txt' }, | ||||
|       { key: 'NEW_BRANCH', value: 'new_branch' }, | ||||
|       { key: 'MASKED_VAR', value: 'masked_value', masked: true } | ||||
|     ]) | ||||
|  | @ -29,7 +31,7 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists, feature_category: | |||
|   end | ||||
| 
 | ||||
|   describe '#satisfied_by?' do | ||||
|     subject(:satisfied_by?) { described_class.new(clause).satisfied_by?(nil, context) } | ||||
|     subject(:satisfied_by?) { described_class.new(clause).satisfied_by?(pipeline, context) } | ||||
| 
 | ||||
|     before do | ||||
|       allow(context).to receive(:variables).and_return(variables) | ||||
|  | @ -65,6 +67,20 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists, feature_category: | |||
| 
 | ||||
|           it { is_expected.to be_falsey } | ||||
|         end | ||||
| 
 | ||||
|         context 'when the variable is nested and matches' do | ||||
|           let(:globs) { ['$NESTED_FULL_PATH_VALID'] } | ||||
| 
 | ||||
|           it { is_expected.to be_truthy } | ||||
| 
 | ||||
|           context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|             before do | ||||
|               stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|             end | ||||
| 
 | ||||
|             it { is_expected.to be_falsey } | ||||
|           end | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when a file path has a variable' do | ||||
|  | @ -114,6 +130,14 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists, feature_category: | |||
|             let(:globs) { ['$FILE_TXT'] } | ||||
| 
 | ||||
|             it { is_expected.to be_truthy } | ||||
| 
 | ||||
|             context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|               before do | ||||
|                 stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|               end | ||||
| 
 | ||||
|               it { is_expected.to be_truthy } | ||||
|             end | ||||
|           end | ||||
| 
 | ||||
|           context 'when the project path is invalid' do | ||||
|  | @ -135,6 +159,19 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists, feature_category: | |||
|                   "rules:exists:project `invalid/path/subdir` is not a valid project path" | ||||
|                 ) | ||||
|               end | ||||
| 
 | ||||
|               context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|                 before do | ||||
|                   stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|                 end | ||||
| 
 | ||||
|                 it 'raises an error' do | ||||
|                   expect { satisfied_by? }.to raise_error( | ||||
|                     Gitlab::Ci::Build::Rules::Rule::Clause::ParseError, | ||||
|                     "rules:exists:project `invalid/path/subdir` is not a valid project path" | ||||
|                   ) | ||||
|                 end | ||||
|               end | ||||
|             end | ||||
| 
 | ||||
|             context 'when the project path contains a masked variable' do | ||||
|  | @ -165,6 +202,14 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists, feature_category: | |||
|               let(:ref) { '$NEW_BRANCH' } | ||||
| 
 | ||||
|               it { is_expected.to be_truthy } | ||||
| 
 | ||||
|               context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|                 before do | ||||
|                   stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|                 end | ||||
| 
 | ||||
|                 it { is_expected.to be_truthy } | ||||
|               end | ||||
|             end | ||||
| 
 | ||||
|             context 'when the ref is invalid' do | ||||
|  | @ -187,6 +232,20 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists, feature_category: | |||
|                     "in project `#{other_project.full_path}`" | ||||
|                   ) | ||||
|                 end | ||||
| 
 | ||||
|                 context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|                   before do | ||||
|                     stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|                   end | ||||
| 
 | ||||
|                   it 'raises an error' do | ||||
|                     expect { satisfied_by? }.to raise_error( | ||||
|                       Gitlab::Ci::Build::Rules::Rule::Clause::ParseError, | ||||
|                       "rules:exists:ref `invalid/ref/new_branch` is not a valid ref " \ | ||||
|                         "in project `#{other_project.full_path}`" | ||||
|                     ) | ||||
|                   end | ||||
|                 end | ||||
|               end | ||||
| 
 | ||||
|               context 'when the ref contains a masked variable' do | ||||
|  |  | |||
|  | @ -5,7 +5,7 @@ require 'spec_helper' | |||
| RSpec.describe Gitlab::Ci::Config::External::Rules, feature_category: :pipeline_composition do | ||||
|   let(:context) { double(variables_hash: {}) } | ||||
|   let(:rule_hashes) {} | ||||
|   let(:pipeline) { instance_double(Ci::Pipeline, project_id: project.id, sha: 'sha') } | ||||
|   let(:pipeline) { instance_double(Ci::Pipeline, project: project, project_id: project.id, sha: 'sha') } | ||||
|   let_it_be(:project) { create(:project, :custom_repo, files: { 'file.txt' => 'file' }) } | ||||
| 
 | ||||
|   subject(:rules) { described_class.new(rule_hashes) } | ||||
|  |  | |||
|  | @ -77,36 +77,6 @@ RSpec.describe Ci::PipelineCreation::Requests, :clean_gitlab_redis_shared_state, | |||
|         expect(described_class.pipeline_creating_for_merge_request?(merge_request)).to be_falsey | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when delete_if_all_complete is true' do | ||||
|       context 'when there are only finished creations for the merge request' do | ||||
|         it 'deletes the MR pipeline creations key from Redis' do | ||||
|           request_1 = described_class.start_for_merge_request(merge_request) | ||||
|           request_2 = described_class.start_for_merge_request(merge_request) | ||||
|           described_class.succeeded(request_1) | ||||
|           described_class.failed(request_2) | ||||
| 
 | ||||
|           expect(described_class.pipeline_creating_for_merge_request?(merge_request, delete_if_all_complete: true)) | ||||
|             .to be_falsey | ||||
|           expect(read(request_1)).to be_nil | ||||
|           expect(read(request_2)).to be_nil | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       context 'when there are unfinished creations for the merge request' do | ||||
|         it 'does not delete the MR pipeline creations key from Redis' do | ||||
|           request_1 = described_class.start_for_merge_request(merge_request) | ||||
|           request_2 = described_class.start_for_merge_request(merge_request) | ||||
|           described_class.start_for_merge_request(merge_request) | ||||
|           described_class.succeeded(request_1) | ||||
| 
 | ||||
|           expect(described_class.pipeline_creating_for_merge_request?(merge_request, delete_if_all_complete: false)) | ||||
|             .to be_truthy | ||||
|           expect(read(request_1)).to eq({ 'status' => 'succeeded' }) | ||||
|           expect(read(request_2)).to eq({ 'status' => 'in_progress' }) | ||||
|         end | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe '.hset' do | ||||
|  |  | |||
|  | @ -277,6 +277,23 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do | |||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe '#owner' do | ||||
|     subject(:owner) { runner.owner } | ||||
| 
 | ||||
|     context 'when runner does not have creator_id' do | ||||
|       let_it_be(:runner) { create(:ci_runner, :instance) } | ||||
| 
 | ||||
|       it { is_expected.to be_nil } | ||||
|     end | ||||
| 
 | ||||
|     context 'when runner has creator' do | ||||
|       let_it_be(:creator) { create(:user) } | ||||
|       let_it_be(:runner) { create(:ci_runner, :instance, creator: creator) } | ||||
| 
 | ||||
|       it { is_expected.to eq creator } | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe '.instance_type' do | ||||
|     let!(:group_runner) { create(:ci_runner, :group, groups: [group]) } | ||||
|     let!(:project_runner) { create(:ci_runner, :project, projects: [project]) } | ||||
|  | @ -1125,8 +1142,8 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do | |||
|     let_it_be(:project1) { create(:project) } | ||||
|     let_it_be(:project2) { create(:project) } | ||||
| 
 | ||||
|     describe '#owner_project' do | ||||
|       subject(:owner_project) { project_runner.owner_project } | ||||
|     describe '#owner' do | ||||
|       subject(:owner) { project_runner.owner } | ||||
| 
 | ||||
|       context 'with project1 as first project associated with runner' do | ||||
|         let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project1, project2]) } | ||||
|  | @ -1638,6 +1655,22 @@ RSpec.describe Ci::Runner, type: :model, feature_category: :runner do | |||
|         end | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     describe '#owner' do | ||||
|       subject(:owner) { runner.owner } | ||||
| 
 | ||||
|       context 'with runner assigned to child_group' do | ||||
|         let(:runner) { child_group_runner } | ||||
| 
 | ||||
|         it { is_expected.to eq child_group } | ||||
|       end | ||||
| 
 | ||||
|       context 'with runner assigned to top_level_group_runner' do | ||||
|         let(:runner) { top_level_group_runner } | ||||
| 
 | ||||
|         it { is_expected.to eq top_level_group } | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| 
 | ||||
|   describe '#short_sha' do | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Todo, feature_category: :team_planning do | ||||
| RSpec.describe Todo, feature_category: :notifications do | ||||
|   let(:issue) { create(:issue) } | ||||
| 
 | ||||
|   describe 'relationships' do | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe TodoPolicy, feature_category: :team_planning do | ||||
| RSpec.describe TodoPolicy, feature_category: :notifications do | ||||
|   using RSpec::Parameterized::TableSyntax | ||||
| 
 | ||||
|   let_it_be(:project) { create(:project) } | ||||
|  |  | |||
|  | @ -3,7 +3,7 @@ | |||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe 'A Todoable that implements the CurrentUserTodos interface', | ||||
|   feature_category: :team_planning do | ||||
|   feature_category: :notifications do | ||||
|   include GraphqlHelpers | ||||
| 
 | ||||
|   let_it_be(:current_user) { create(:user) } | ||||
|  |  | |||
|  | @ -2,7 +2,7 @@ | |||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe 'Todo Query', feature_category: :team_planning do | ||||
| RSpec.describe 'Todo Query', feature_category: :notifications do | ||||
|   include GraphqlHelpers | ||||
| 
 | ||||
|   let_it_be(:current_user) { nil } | ||||
|  |  | |||
|  | @ -1,66 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Ci::DagJobEntity do | ||||
|   let_it_be(:request) { double(:request) } | ||||
| 
 | ||||
|   let(:job) { create(:ci_build, name: 'dag_job') } | ||||
|   let(:entity) { described_class.new(job, request: request) } | ||||
| 
 | ||||
|   describe '#as_json' do | ||||
|     subject { entity.as_json } | ||||
| 
 | ||||
|     RSpec.shared_examples "matches schema" do | ||||
|       it "matches schema" do | ||||
|         expect(subject.to_json).to match_schema('entities/dag_job') | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     it 'contains the name' do | ||||
|       expect(subject[:name]).to eq 'dag_job' | ||||
|     end | ||||
| 
 | ||||
|     it_behaves_like "matches schema" | ||||
| 
 | ||||
|     context 'when job is stage scheduled' do | ||||
|       it 'contains the name scheduling_type' do | ||||
|         expect(subject[:scheduling_type]).to eq 'stage' | ||||
|       end | ||||
| 
 | ||||
|       it 'does not expose needs' do | ||||
|         expect(subject).not_to include(:needs) | ||||
|       end | ||||
| 
 | ||||
|       it_behaves_like "matches schema" | ||||
|     end | ||||
| 
 | ||||
|     context 'when job is dag scheduled' do | ||||
|       let(:job) { create(:ci_build, scheduling_type: 'dag') } | ||||
| 
 | ||||
|       it 'contains the name scheduling_type' do | ||||
|         expect(subject[:scheduling_type]).to eq 'dag' | ||||
|       end | ||||
| 
 | ||||
|       it_behaves_like "matches schema" | ||||
| 
 | ||||
|       context 'when job has needs' do | ||||
|         let!(:need) { create(:ci_build_need, build: job, name: 'compile') } | ||||
| 
 | ||||
|         it 'exposes the array of needs' do | ||||
|           expect(subject[:needs]).to eq ['compile'] | ||||
|         end | ||||
| 
 | ||||
|         it_behaves_like "matches schema" | ||||
|       end | ||||
| 
 | ||||
|       context 'when job has empty needs' do | ||||
|         it 'exposes an empty array of needs' do | ||||
|           expect(subject[:needs]).to eq [] | ||||
|         end | ||||
| 
 | ||||
|         it_behaves_like "matches schema" | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -1,66 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Ci::DagJobGroupEntity do | ||||
|   let_it_be(:request) { double(:request) } | ||||
|   let_it_be(:pipeline) { create(:ci_pipeline) } | ||||
|   let_it_be(:stage) { create(:ci_stage, pipeline: pipeline) } | ||||
| 
 | ||||
|   let(:group) { Ci::Group.new(pipeline.project, stage, name: 'test', jobs: jobs) } | ||||
|   let(:entity) { described_class.new(group, request: request) } | ||||
| 
 | ||||
|   describe '#as_json' do | ||||
|     subject { entity.as_json } | ||||
| 
 | ||||
|     context 'when group contains 1 job' do | ||||
|       let(:job) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test') } | ||||
|       let(:jobs) { [job] } | ||||
| 
 | ||||
|       it 'exposes a name' do | ||||
|         expect(subject.fetch(:name)).to eq 'test' | ||||
|       end | ||||
| 
 | ||||
|       it 'exposes the size' do | ||||
|         expect(subject.fetch(:size)).to eq 1 | ||||
|       end | ||||
| 
 | ||||
|       it 'exposes the jobs' do | ||||
|         exposed_jobs = subject.fetch(:jobs) | ||||
| 
 | ||||
|         expect(exposed_jobs.size).to eq 1 | ||||
|         expect(exposed_jobs.first.fetch(:name)).to eq 'test' | ||||
|       end | ||||
| 
 | ||||
|       it 'matches schema' do | ||||
|         expect(subject.to_json).to match_schema('entities/dag_job_group') | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when group contains multiple parallel jobs' do | ||||
|       let(:job_1) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test 1/2') } | ||||
|       let(:job_2) { create(:ci_build, stage_id: stage.id, pipeline: pipeline, name: 'test 2/2') } | ||||
|       let(:jobs) { [job_1, job_2] } | ||||
| 
 | ||||
|       it 'exposes a name' do | ||||
|         expect(subject.fetch(:name)).to eq 'test' | ||||
|       end | ||||
| 
 | ||||
|       it 'exposes the size' do | ||||
|         expect(subject.fetch(:size)).to eq 2 | ||||
|       end | ||||
| 
 | ||||
|       it 'exposes the jobs' do | ||||
|         exposed_jobs = subject.fetch(:jobs) | ||||
| 
 | ||||
|         expect(exposed_jobs.size).to eq 2 | ||||
|         expect(exposed_jobs.first.fetch(:name)).to eq 'test 1/2' | ||||
|         expect(exposed_jobs.last.fetch(:name)).to eq 'test 2/2' | ||||
|       end | ||||
| 
 | ||||
|       it 'matches schema' do | ||||
|         expect(subject.to_json).to match_schema('entities/dag_job_group') | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -1,163 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Ci::DagPipelineEntity do | ||||
|   let_it_be(:request) { double(:request) } | ||||
| 
 | ||||
|   let_it_be(:pipeline) { create(:ci_pipeline) } | ||||
| 
 | ||||
|   let(:entity) { described_class.new(pipeline, request: request) } | ||||
| 
 | ||||
|   describe '#as_json' do | ||||
|     subject { entity.as_json } | ||||
| 
 | ||||
|     RSpec.shared_examples "matches schema" do | ||||
|       it 'matches schema' do | ||||
|         expect(subject.to_json).to match_schema('entities/dag_pipeline') | ||||
|       end | ||||
|     end | ||||
| 
 | ||||
|     context 'when pipeline is empty' do | ||||
|       it 'contains stages' do | ||||
|         expect(subject).to include(:stages) | ||||
| 
 | ||||
|         expect(subject[:stages]).to be_empty | ||||
|       end | ||||
| 
 | ||||
|       it_behaves_like "matches schema" | ||||
|     end | ||||
| 
 | ||||
|     context 'when pipeline has jobs' do | ||||
|       let_it_be(:build_stage) { create(:ci_stage, name: 'build', pipeline: pipeline) } | ||||
|       let_it_be(:test_stage) { create(:ci_stage, name: 'test', pipeline: pipeline) } | ||||
|       let_it_be(:deploy_stage) { create(:ci_stage, name: 'deploy', pipeline: pipeline) } | ||||
| 
 | ||||
|       let!(:build_job)  { create(:ci_build, ci_stage: build_stage,  pipeline: pipeline) } | ||||
|       let!(:test_job)   { create(:ci_build, ci_stage: test_stage,   pipeline: pipeline) } | ||||
|       let!(:deploy_job) { create(:ci_build, ci_stage: deploy_stage, pipeline: pipeline) } | ||||
| 
 | ||||
|       it 'contains 3 stages' do | ||||
|         stages = subject[:stages] | ||||
| 
 | ||||
|         expect(stages.size).to eq 3 | ||||
|         expect(stages.map { |s| s[:name] }).to contain_exactly('build', 'test', 'deploy') | ||||
|       end | ||||
| 
 | ||||
|       it_behaves_like "matches schema" | ||||
|     end | ||||
| 
 | ||||
|     context 'when pipeline has parallel jobs, DAG needs and GenericCommitStatus' do | ||||
|       let!(:stage_build)  { create(:ci_stage, name: 'build',  position: 1, pipeline: pipeline) } | ||||
|       let!(:stage_test)   { create(:ci_stage, name: 'test',   position: 2, pipeline: pipeline) } | ||||
|       let!(:stage_deploy) { create(:ci_stage, name: 'deploy', position: 3, pipeline: pipeline) } | ||||
| 
 | ||||
|       let!(:job_build_1)   { create(:ci_build, name: 'build 1', ci_stage: stage_build, pipeline: pipeline) } | ||||
|       let!(:job_build_2)   { create(:ci_build, name: 'build 2', ci_stage: stage_build, pipeline: pipeline) } | ||||
|       let!(:commit_status) { create(:generic_commit_status, ci_stage: stage_build, pipeline: pipeline) } | ||||
| 
 | ||||
|       let!(:job_rspec_1) { create(:ci_build, name: 'rspec 1/2', ci_stage: stage_test, pipeline: pipeline) } | ||||
|       let!(:job_rspec_2) { create(:ci_build, name: 'rspec 2/2', ci_stage: stage_test, pipeline: pipeline) } | ||||
| 
 | ||||
|       let!(:job_jest) do | ||||
|         create(:ci_build, name: 'jest', ci_stage: stage_test, scheduling_type: 'dag', pipeline: pipeline) | ||||
|           .tap do |job| | ||||
|           create(:ci_build_need, name: 'build 1', build: job) | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       let!(:job_deploy_ruby) do | ||||
|         create(:ci_build, name: 'deploy_ruby', ci_stage: stage_deploy, scheduling_type: 'dag', pipeline: pipeline) | ||||
|           .tap do |job| | ||||
|           create(:ci_build_need, name: 'rspec 1/2', build: job) | ||||
|           create(:ci_build_need, name: 'rspec 2/2', build: job) | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       let!(:job_deploy_js) do | ||||
|         create(:ci_build, name: 'deploy_js', ci_stage: stage_deploy, scheduling_type: 'dag', pipeline: pipeline) | ||||
|           .tap do |job| | ||||
|           create(:ci_build_need, name: 'jest', build: job) | ||||
|         end | ||||
|       end | ||||
| 
 | ||||
|       it 'performs the smallest number of queries', :request_store do | ||||
|         log = ActiveRecord::QueryRecorder.new { subject } | ||||
| 
 | ||||
|         # stages, project, builds, build_needs | ||||
|         expect(log.count).to eq 4 | ||||
|       end | ||||
| 
 | ||||
|       it 'contains all the data' do | ||||
|         expected_result = { | ||||
|           stages: [ | ||||
|             { | ||||
|               name: 'build', | ||||
|               groups: [ | ||||
|                 { | ||||
|                   name: 'build 1', size: 1, jobs: [ | ||||
|                     { name: 'build 1', scheduling_type: 'stage' } | ||||
|                   ] | ||||
|                 }, | ||||
|                 { | ||||
|                   name: 'build 2', size: 1, jobs: [ | ||||
|                     { name: 'build 2', scheduling_type: 'stage' } | ||||
|                   ] | ||||
|                 }, | ||||
|                 { | ||||
|                   name: 'generic', size: 1, jobs: [ | ||||
|                     { name: 'generic', scheduling_type: nil } | ||||
|                   ] | ||||
|                 } | ||||
|               ] | ||||
|             }, | ||||
|             { | ||||
|               name: 'test', | ||||
|               groups: [ | ||||
|                 { | ||||
|                   name: 'jest', size: 1, jobs: [ | ||||
|                     { name: 'jest', scheduling_type: 'dag', needs: ['build 1'] } | ||||
|                   ] | ||||
|                 }, | ||||
|                 { | ||||
|                   name: 'rspec', size: 2, jobs: [ | ||||
|                     { name: 'rspec 1/2', scheduling_type: 'stage' }, | ||||
|                     { name: 'rspec 2/2', scheduling_type: 'stage' } | ||||
|                   ] | ||||
|                 } | ||||
|               ] | ||||
|             }, | ||||
|             { | ||||
|               name: 'deploy', | ||||
|               groups: [ | ||||
|                 { | ||||
|                   name: 'deploy_js', size: 1, jobs: [ | ||||
|                     { name: 'deploy_js', scheduling_type: 'dag', needs: ['jest'] } | ||||
|                   ] | ||||
|                 }, | ||||
|                 { | ||||
|                   name: 'deploy_ruby', size: 1, jobs: [ | ||||
|                     { name: 'deploy_ruby', scheduling_type: 'dag', needs: ['rspec 1/2', 'rspec 2/2'] } | ||||
|                   ] | ||||
|                 } | ||||
|               ] | ||||
|             } | ||||
|           ] | ||||
|         } | ||||
| 
 | ||||
|         expect(subject.fetch(:stages)).not_to be_empty | ||||
| 
 | ||||
|         expect(subject.fetch(:stages)[0].fetch(:name)).to eq 'build' | ||||
|         expect(subject.fetch(:stages)[0]).to eq expected_result.fetch(:stages)[0] | ||||
| 
 | ||||
|         expect(subject.fetch(:stages)[1].fetch(:name)).to eq 'test' | ||||
|         expect(subject.fetch(:stages)[1]).to eq expected_result.fetch(:stages)[1] | ||||
| 
 | ||||
|         expect(subject.fetch(:stages)[2].fetch(:name)).to eq 'deploy' | ||||
|         expect(subject.fetch(:stages)[2]).to eq expected_result.fetch(:stages)[2] | ||||
|       end | ||||
| 
 | ||||
|       it_behaves_like "matches schema" | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -1,21 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Ci::DagPipelineSerializer do | ||||
|   describe '#represent' do | ||||
|     subject { described_class.new.represent(pipeline) } | ||||
| 
 | ||||
|     let(:pipeline) { create(:ci_pipeline) } | ||||
|     let!(:job) { create(:ci_build, pipeline: pipeline) } | ||||
| 
 | ||||
|     it 'includes stages' do | ||||
|       expect(subject[:stages]).to be_present | ||||
|       expect(subject[:stages].size).to eq 1 | ||||
|     end | ||||
| 
 | ||||
|     it 'matches schema' do | ||||
|       expect(subject.to_json).to match_schema('entities/dag_pipeline') | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -1,35 +0,0 @@ | |||
| # frozen_string_literal: true | ||||
| 
 | ||||
| require 'spec_helper' | ||||
| 
 | ||||
| RSpec.describe Ci::DagStageEntity do | ||||
|   let_it_be(:pipeline) { create(:ci_pipeline) } | ||||
|   let_it_be(:request) { double(:request) } | ||||
| 
 | ||||
|   let(:stage) { create(:ci_stage, pipeline: pipeline, name: 'test') } | ||||
|   let(:entity) { described_class.new(stage, request: request) } | ||||
| 
 | ||||
|   let!(:job) { create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id) } | ||||
| 
 | ||||
|   describe '#as_json' do | ||||
|     subject { entity.as_json } | ||||
| 
 | ||||
|     it 'contains valid name' do | ||||
|       expect(subject[:name]).to eq 'test' | ||||
|     end | ||||
| 
 | ||||
|     it 'contains the job groups' do | ||||
|       expect(subject).to include :groups | ||||
|       expect(subject[:groups]).not_to be_empty | ||||
| 
 | ||||
|       job_group = subject[:groups].first | ||||
|       expect(job_group[:name]).to eq 'test' | ||||
|       expect(job_group[:size]).to eq 1 | ||||
|       expect(job_group[:jobs]).not_to be_empty | ||||
|     end | ||||
| 
 | ||||
|     it "matches schema" do | ||||
|       expect(subject.to_json).to match_schema('entities/dag_stage') | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | @ -227,7 +227,7 @@ RSpec.describe Ci::CreatePipelineService, feature_category: :pipeline_compositio | |||
|           script: echo Hello, World! | ||||
|           rules: | ||||
|             - exists: | ||||
|               - $VAR_NESTED # does not match because of https://gitlab.com/gitlab-org/gitlab/-/issues/411344 | ||||
|               - $VAR_NESTED | ||||
|         YAML | ||||
|       end | ||||
| 
 | ||||
|  | @ -247,7 +247,18 @@ RSpec.describe Ci::CreatePipelineService, feature_category: :pipeline_compositio | |||
| 
 | ||||
|         it 'creates all relevant jobs' do | ||||
|           expect(pipeline).to be_persisted | ||||
|           expect(build_names).to contain_exactly('job1', 'job2') | ||||
|           expect(build_names).to contain_exactly('job1', 'job2', 'job4') | ||||
|         end | ||||
| 
 | ||||
|         context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|           before do | ||||
|             stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|           end | ||||
| 
 | ||||
|           it 'creates all relevant jobs' do | ||||
|             expect(pipeline).to be_persisted | ||||
|             expect(build_names).to contain_exactly('job1', 'job2') | ||||
|           end | ||||
|         end | ||||
|       end | ||||
|     end | ||||
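For readers following the two hunks above: the behaviour under test is nested CI/CD variable expansion inside rules:exists, gated by the expand_nested_variables_in_job_rules_exists_and_changes feature flag. The sketch below is illustrative only; the definition of VAR_NESTED and the FILE_BASE helper variable are assumptions, since the hunks show the rule but not the variables block that defines it.

    # Illustrative .gitlab-ci.yml sketch, not taken verbatim from the spec.
    variables:
      FILE_BASE: "Dockerfile"      # hypothetical helper variable (assumed)
      VAR_NESTED: "$FILE_BASE"     # nested: resolves to another variable (assumed definition)

    job4:
      script: echo Hello, World!
      rules:
        - exists:
            - $VAR_NESTED          # with the flag enabled, the nested reference is fully
                                   # expanded before the file-existence check

With the flag disabled, the nested reference is left unexpanded and the rule does not match, which is why the older expectation in the spec lists only job1 and job2.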
|  | @ -808,6 +819,10 @@ RSpec.describe Ci::CreatePipelineService, feature_category: :pipeline_compositio | |||
|                 VALID_BRANCH_NAME: feature_1 | ||||
|                 FEATURE_BRANCH_NAME_PREFIX: feature_ | ||||
|                 INVALID_BRANCH_NAME: invalid-branch | ||||
|                 VALID_FILENAME: file2.txt | ||||
|                 INVALID_FILENAME: file1.txt | ||||
|                 VALID_BASENAME: file2 | ||||
|                 VALID_NESTED_VARIABLE: ${VALID_BASENAME}.txt | ||||
|               job1: | ||||
|                 script: exit 0 | ||||
|                 rules: | ||||
|  | @ -857,6 +872,52 @@ RSpec.describe Ci::CreatePipelineService, feature_category: :pipeline_compositio | |||
|                 ) | ||||
|               end | ||||
|             end | ||||
| 
 | ||||
|             context 'when paths is defined by a variable' do | ||||
|               let(:compare_to) { '${VALID_BRANCH_NAME}' } | ||||
| 
 | ||||
|               context 'when the variable does not exist' do | ||||
|                 let(:changed_file) { '$NON_EXISTENT_VAR' } | ||||
| 
 | ||||
|                 it 'does not create job1' do | ||||
|                   expect(build_names).to contain_exactly('job2') | ||||
|                 end | ||||
|               end | ||||
| 
 | ||||
|               context 'when the variable contains a matching filename' do | ||||
|                 let(:changed_file) { '$VALID_FILENAME' } | ||||
| 
 | ||||
|                 it 'creates both jobs' do | ||||
|                   expect(build_names).to contain_exactly('job1', 'job2') | ||||
|                 end | ||||
|               end | ||||
| 
 | ||||
|               context 'when the variable does not contain a matching filename' do | ||||
|                 let(:changed_file) { '$INVALID_FILENAME' } | ||||
| 
 | ||||
|                 it 'does not create job1' do | ||||
|                   expect(build_names).to contain_exactly('job2') | ||||
|                 end | ||||
|               end | ||||
| 
 | ||||
|               context 'when the variable is nested and contains a matching filename' do | ||||
|                 let(:changed_file) { '$VALID_NESTED_VARIABLE' } | ||||
| 
 | ||||
|                 it 'creates both jobs' do | ||||
|                   expect(build_names).to contain_exactly('job1', 'job2') | ||||
|                 end | ||||
| 
 | ||||
|                 context 'when expand_nested_variables_in_job_rules_exists_and_changes is disabled' do | ||||
|                   before do | ||||
|                     stub_feature_flags(expand_nested_variables_in_job_rules_exists_and_changes: false) | ||||
|                   end | ||||
| 
 | ||||
|                   it 'does not create job1' do | ||||
|                     expect(build_names).to contain_exactly('job2') | ||||
|                   end | ||||
|                 end | ||||
|               end | ||||
|             end | ||||
|           end | ||||
|         end | ||||
| 
 | ||||
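The contexts in this hunk appear to exercise rules:changes:paths where the path itself is a CI/CD variable (possibly nested) and the diff base comes from rules:changes:compare_to. A configuration matching those let blocks might look like the sketch below; the exact rule body for job1 is an assumption, since the hunk only shows the variables and the spec's let definitions.

    # Illustrative sketch based on the variables shown above; job1's rule body is assumed.
    variables:
      VALID_BRANCH_NAME: feature_1
      VALID_BASENAME: file2
      VALID_NESTED_VARIABLE: ${VALID_BASENAME}.txt   # expands to file2.txt

    job1:
      script: exit 0
      rules:
        - changes:
            paths:
              - $VALID_NESTED_VARIABLE               # variable naming the changed file
            compare_to: ${VALID_BRANCH_NAME}         # branch to diff against

    job2:
      script: exit 0

As in the earlier hunk, resolving the nested $VALID_NESTED_VARIABLE down to file2.txt depends on the expand_nested_variables_in_job_rules_exists_and_changes flag; with the flag disabled the spec expects only job2 to be created.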