diff --git a/app/assets/javascripts/groups/settings/components/access_dropdown.vue b/app/assets/javascripts/groups/settings/components/access_dropdown.vue index f8cdb1cba44..e1d2a7fa84f 100644 --- a/app/assets/javascripts/groups/settings/components/access_dropdown.vue +++ b/app/assets/javascripts/groups/settings/components/access_dropdown.vue @@ -299,7 +299,7 @@ export default { data-testid="role-dropdown-item" is-check-item :is-checked="isSelected(role)" - @click.native.capture.stop="onItemClick(role)" + @click.capture.native.stop="onItemClick(role)" > {{ role.text }} @@ -316,7 +316,7 @@ export default { :avatar-url="group.avatar_url" is-check-item :is-checked="isSelected(group)" - @click.native.capture.stop="onItemClick(group)" + @click.capture.native.stop="onItemClick(group)" > {{ group.name }} @@ -335,7 +335,7 @@ export default { :secondary-text="user.username" is-check-item :is-checked="isSelected(user)" - @click.native.capture.stop="onItemClick(user)" + @click.capture.native.stop="onItemClick(user)" > {{ user.name }} diff --git a/app/assets/javascripts/projects/settings/components/access_dropdown.vue b/app/assets/javascripts/projects/settings/components/access_dropdown.vue index 675818d6273..135e926cedf 100644 --- a/app/assets/javascripts/projects/settings/components/access_dropdown.vue +++ b/app/assets/javascripts/projects/settings/components/access_dropdown.vue @@ -454,7 +454,7 @@ export default { data-testid="role-dropdown-item" is-check-item :is-checked="isSelected(role)" - @click.native.capture.stop="onItemClick(role)" + @click.capture.native.stop="onItemClick(role)" > {{ role.text }} @@ -472,7 +472,7 @@ export default { :avatar-url="group.avatar_url" is-check-item :is-checked="isSelected(group)" - @click.native.capture.stop="onItemClick(group)" + @click.capture.native.stop="onItemClick(group)" > {{ group.name }} @@ -491,7 +491,7 @@ export default { :secondary-text="user.username" is-check-item :is-checked="isSelected(user)" - @click.native.capture.stop="onItemClick(user)" + @click.capture.native.stop="onItemClick(user)" > {{ user.name }} @@ -509,7 +509,7 @@ export default { is-check-item :is-checked="isSelected(key)" class="gl-truncate" - @click.native.capture.stop="onItemClick(key)" + @click.capture.native.stop="onItemClick(key)" >
{{ key.title }}
diff --git a/app/assets/javascripts/security_configuration/components/app.vue b/app/assets/javascripts/security_configuration/components/app.vue index ef603c7804e..623596a35eb 100644 --- a/app/assets/javascripts/security_configuration/components/app.vue +++ b/app/assets/javascripts/security_configuration/components/app.vue @@ -6,6 +6,7 @@ import UserCalloutDismisser from '~/vue_shared/components/user_callout_dismisser import SectionLayout from '~/vue_shared/security_configuration/components/section_layout.vue'; import SafeHtml from '~/vue_shared/directives/safe_html'; import { SERVICE_PING_SECURITY_CONFIGURATION_THREAT_MANAGEMENT_VISIT } from '~/tracking/constants'; +import { REPORT_TYPE_CONTAINER_SCANNING_FOR_REGISTRY } from '~/vue_shared/security_reports/constants'; import { AUTO_DEVOPS_ENABLED_ALERT_DISMISSED_STORAGE_KEY, TAB_VULNERABILITY_MANAGEMENT_INDEX, @@ -36,6 +37,10 @@ export default { import('ee_component/security_configuration/components/upgrade_banner.vue'), UserCalloutDismisser, TrainingProviderList, + ContainerScanningForRegistryFeatureCard: () => + import( + 'ee_component/security_configuration/components/container_scanning_for_registry_feature_card.vue' + ), }, directives: { SafeHtml }, inject: ['projectFullPath', 'vulnerabilityTrainingDocsPath'], @@ -102,6 +107,9 @@ export default { if (feature.type === PRE_RECEIVE_SECRET_DETECTION) { return 'pre-receive-secret-detection-feature-card'; } + if (feature.type === REPORT_TYPE_CONTAINER_SCANNING_FOR_REGISTRY) { + return 'container-scanning-for-registry-feature-card'; + } return 'feature-card'; }, dismissAutoDevopsEnabledAlert() { diff --git a/app/assets/javascripts/security_configuration/components/container_scanning_for_registry.vue b/app/assets/javascripts/security_configuration/components/container_scanning_for_registry.vue deleted file mode 100644 index 1b64bdce863..00000000000 --- a/app/assets/javascripts/security_configuration/components/container_scanning_for_registry.vue +++ /dev/null @@ -1,95 +0,0 @@ - - - diff --git a/app/assets/javascripts/security_configuration/components/feature_card.vue b/app/assets/javascripts/security_configuration/components/feature_card.vue index d965ef79c86..f64c1b6806c 100644 --- a/app/assets/javascripts/security_configuration/components/feature_card.vue +++ b/app/assets/javascripts/security_configuration/components/feature_card.vue @@ -23,19 +23,12 @@ export default { required: true, }, }, - data() { - return { - overrideStatus: null, - }; - }, computed: { available() { return this.feature.available; }, enabled() { - return !(this.overrideStatus === null) - ? this.overrideStatus - : this.available && this.feature.configured; + return this.available && this.feature.configured; }, shortName() { return this.feature.shortName ?? this.feature.name; @@ -107,9 +100,6 @@ export default { onError(message) { this.$emit('error', message); }, - onOverrideStatus(status) { - this.overrideStatus = status; - }, }, i18n: { enabled: s__('SecurityConfiguration|Enabled'), @@ -225,12 +215,5 @@ export default { {{ $options.i18n.configurationGuide }}
- - diff --git a/app/assets/javascripts/security_configuration/constants.js b/app/assets/javascripts/security_configuration/constants.js index 7e3a1bac5c5..191f8e5f951 100644 --- a/app/assets/javascripts/security_configuration/constants.js +++ b/app/assets/javascripts/security_configuration/constants.js @@ -8,15 +8,12 @@ import { REPORT_TYPE_SAST, REPORT_TYPE_SAST_IAC, REPORT_TYPE_SECRET_DETECTION, - REPORT_TYPE_CONTAINER_SCANNING_FOR_REGISTRY, } from '~/vue_shared/security_reports/constants'; import configureSastMutation from './graphql/configure_sast.mutation.graphql'; import configureSastIacMutation from './graphql/configure_iac.mutation.graphql'; import configureSecretDetectionMutation from './graphql/configure_secret_detection.mutation.graphql'; -import ContainerScanningForRegistry from './components/container_scanning_for_registry.vue'; - /** * Translations for Security Configuration Page * Make sure to add new scanner translations to the SCANNER_NAMES_MAP below. @@ -72,12 +69,6 @@ export const SCANNER_NAMES_MAP = { GENERIC: s__('ciReport|Manually added'), }; -export const securityFeatures = { - [REPORT_TYPE_CONTAINER_SCANNING_FOR_REGISTRY]: { - slotComponent: ContainerScanningForRegistry, - }, -}; - export const featureToMutationMap = { [REPORT_TYPE_SAST]: { mutationId: 'configureSast', diff --git a/app/assets/javascripts/security_configuration/graphql/set_container_scanning_for_registry.graphql b/app/assets/javascripts/security_configuration/graphql/set_container_scanning_for_registry.graphql deleted file mode 100644 index 843f0edbe02..00000000000 --- a/app/assets/javascripts/security_configuration/graphql/set_container_scanning_for_registry.graphql +++ /dev/null @@ -1,6 +0,0 @@ -mutation SetContainerScanningForRegistry($input: SetContainerScanningForRegistryInput!) { - setContainerScanningForRegistry(input: $input) { - containerScanningForRegistryEnabled - errors - } -} diff --git a/app/assets/javascripts/security_configuration/index.js b/app/assets/javascripts/security_configuration/index.js index 9b5fbd02776..d76466bfa0c 100644 --- a/app/assets/javascripts/security_configuration/index.js +++ b/app/assets/javascripts/security_configuration/index.js @@ -5,7 +5,6 @@ import createDefaultClient from '~/lib/graphql'; import { parseBooleanDataAttributes } from '~/lib/utils/dom_utils'; import SecurityConfigurationApp from './components/app.vue'; import { augmentFeatures } from './utils'; -import { securityFeatures } from './constants'; export const initSecurityConfiguration = (el) => { if (!el) { @@ -31,10 +30,7 @@ export const initSecurityConfiguration = (el) => { containerScanningForRegistryEnabled, } = el.dataset; - const { augmentedSecurityFeatures } = augmentFeatures( - securityFeatures, - features ? JSON.parse(features) : [], - ); + const { augmentedSecurityFeatures } = augmentFeatures(features ? 
JSON.parse(features) : []); return new Vue({ el, diff --git a/app/assets/javascripts/security_configuration/utils.js b/app/assets/javascripts/security_configuration/utils.js index ac83f21c1d2..ce7a647e176 100644 --- a/app/assets/javascripts/security_configuration/utils.js +++ b/app/assets/javascripts/security_configuration/utils.js @@ -14,7 +14,7 @@ import { REPORT_TYPE_DAST } from '~/vue_shared/security_reports/constants'; * @returns {Object} Object with enriched features from constants divided into Security and compliance Features */ -export const augmentFeatures = (securityFeatures, features = []) => { +export const augmentFeatures = (features = []) => { const featuresByType = features.reduce((acc, feature) => { acc[feature.type] = convertObjectPropsToCamelCase(feature, { deep: true }); return acc; @@ -31,7 +31,6 @@ export const augmentFeatures = (securityFeatures, features = []) => { const augmented = { ...feature, ...featuresByType[feature.type], - ...securityFeatures[feature.type], }; // Secondary layer copies some values from the first layer diff --git a/app/assets/javascripts/sidebar/components/labels/labels_select_widget/dropdown_contents_labels_view.vue b/app/assets/javascripts/sidebar/components/labels/labels_select_widget/dropdown_contents_labels_view.vue index d66d4e673ed..c24902796f4 100644 --- a/app/assets/javascripts/sidebar/components/labels/labels_select_widget/dropdown_contents_labels_view.vue +++ b/app/assets/javascripts/sidebar/components/labels/labels_select_widget/dropdown_contents_labels_view.vue @@ -167,8 +167,8 @@ export default { :active="shouldHighlightFirstItem && index === 0" active-class="is-focused" data-testid="labels-list" - @focus.native.capture="handleFocus($event, index)" - @click.native.capture.stop="handleLabelClick(label)" + @focus.capture.native="handleFocus($event, index)" + @click.capture.native.stop="handleLabelClick(label)" > diff --git a/app/assets/javascripts/vue_shared/components/color_select_dropdown/dropdown_contents_color_view.vue b/app/assets/javascripts/vue_shared/components/color_select_dropdown/dropdown_contents_color_view.vue index 22f3c35b9c3..257bec65506 100644 --- a/app/assets/javascripts/vue_shared/components/color_select_dropdown/dropdown_contents_color_view.vue +++ b/app/assets/javascripts/vue_shared/components/color_select_dropdown/dropdown_contents_color_view.vue @@ -44,7 +44,7 @@ export default { :is-checked="isColorSelected(color)" is-check-centered is-check-item - @click.native.capture.stop="handleColorClick(color)" + @click.capture.native.stop="handleColorClick(color)" > diff --git a/app/assets/javascripts/vue_shared/components/dropdown/dropdown_widget/dropdown_widget.vue b/app/assets/javascripts/vue_shared/components/dropdown/dropdown_widget/dropdown_widget.vue index 60e2ea15d15..a5843482609 100644 --- a/app/assets/javascripts/vue_shared/components/dropdown/dropdown_widget/dropdown_widget.vue +++ b/app/assets/javascripts/vue_shared/components/dropdown/dropdown_widget/dropdown_widget.vue @@ -158,7 +158,7 @@ export default { :is-checked="isSelected(option)" is-check-centered is-check-item - @click.native.capture.stop="selectOption(option)" + @click.capture.native.stop="selectOption(option)" > {{ option.title }} @@ -175,7 +175,7 @@ export default { :avatar-url="avatarUrl(option)" :secondary-text="secondaryText(option)" data-testid="unselected-option" - @click.native.capture.stop="selectOption(option)" + @click.capture.native.stop="selectOption(option)" > {{ option.title }} diff --git 
a/app/assets/javascripts/vue_shared/components/user_select/user_select.vue b/app/assets/javascripts/vue_shared/components/user_select/user_select.vue index c4ae0f00fe0..3a6b22e3a81 100644 --- a/app/assets/javascripts/vue_shared/components/user_select/user_select.vue +++ b/app/assets/javascripts/vue_shared/components/user_select/user_select.vue @@ -334,7 +334,7 @@ export default { :is-checked="selectedIsEmpty" is-check-centered data-testid="unassign" - @click.native.capture.stop="unassign" + @click.capture.native.stop="unassign" > {{ $options.i18n.unassigned @@ -351,7 +351,7 @@ export default { is-checked is-check-centered data-testid="selected-participant" - @click.native.capture.stop="unselect(item.username)" + @click.capture.native.stop="unselect(item.username)" > @@ -359,7 +359,7 @@ export default { * { display: inline-block; margin-bottom: 0; diff --git a/app/graphql/types/ci/stage_type.rb b/app/graphql/types/ci/stage_type.rb index 3eb4075dc5b..e66ad2ed562 100644 --- a/app/graphql/types/ci/stage_type.rb +++ b/app/graphql/types/ci/stage_type.rb @@ -52,7 +52,7 @@ module Types def jobs GraphQL::Pagination::ActiveRecordRelationConnection.new( - object.latest_statuses, + object.ordered_latest_statuses, max_page_size: Gitlab::CurrentSettings.current_application_settings.jobs_per_stage_page_size ) end diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb index 521f95e3cd1..31125b5a5a8 100644 --- a/app/models/ci/pipeline.rb +++ b/app/models/ci/pipeline.rb @@ -20,6 +20,7 @@ module Ci include FastDestroyAll::Helpers self.primary_key = :id + self.sequence_name = :ci_pipelines_id_seq MAX_OPEN_MERGE_REQUESTS_REFS = 4 diff --git a/app/models/ci/stage.rb b/app/models/ci/stage.rb index 7fc5d476825..dae3b1e9ace 100644 --- a/app/models/ci/stage.rb +++ b/app/models/ci/stage.rb @@ -206,5 +206,23 @@ module Ci def latest_stage_status statuses.latest.composite_status || 'skipped' end + + def ordered_latest_statuses + preload_metadata(statuses.in_order_of(:status, Ci::HasStatus::ORDERED_STATUSES).latest_ordered) + end + + def ordered_retried_statuses + preload_metadata(statuses.in_order_of(:status, Ci::HasStatus::ORDERED_STATUSES).retried_ordered) + end + + private + + def preload_metadata(statuses) + relations = [:metadata, :pipeline, { downstream_pipeline: [:user, { project: [:route, { namespace: :route }] }] }] + + Preloaders::CommitStatusPreloader.new(statuses).execute(relations) + + statuses + end end end diff --git a/app/models/commit.rb b/app/models/commit.rb index 4f7af5f6a26..68ceac6880f 100644 --- a/app/models/commit.rb +++ b/app/models/commit.rb @@ -15,6 +15,7 @@ class Commit include ::Gitlab::Utils::StrongMemoize include ActsAsPaginatedDiff include CacheMarkdownField + include GlobalID::Identification participant :author participant :committer @@ -644,3 +645,5 @@ class Commit MergeRequestsFinder.new(user, project_id: project_id).find_by(squash_commit_sha: id) end end + +Commit.prepend_mod_with('Projects::Commit') diff --git a/app/models/repository.rb b/app/models/repository.rb index 4a8c0975282..55e723c8b97 100644 --- a/app/models/repository.rb +++ b/app/models/repository.rb @@ -885,6 +885,11 @@ class Repository options[:start_repository] = start_project.repository.raw_repository end + skip_target_sha = options.delete(:skip_target_sha) + unless skip_target_sha || Feature.disabled?(:validate_target_sha_in_user_commit_files, project) + options[:target_sha] = self.commit(options[:branch_name])&.sha + end + with_cache_hooks { raw.commit_files(user, **options) } end diff --git 
a/app/presenters/commit_presenter.rb b/app/presenters/commit_presenter.rb
index 2879326ff8a..600c7fa71f4 100644
--- a/app/presenters/commit_presenter.rb
+++ b/app/presenters/commit_presenter.rb
@@ -1,8 +1,6 @@
 # frozen_string_literal: true
 
 class CommitPresenter < Gitlab::View::Presenter::Delegated
-  include GlobalID::Identification
-
   presents ::Commit, as: :commit
 
   def detailed_status_for(ref)
diff --git a/app/serializers/stage_entity.rb b/app/serializers/stage_entity.rb
index 60cf8dc7473..59cf823e41e 100644
--- a/app/serializers/stage_entity.rb
+++ b/app/serializers/stage_entity.rb
@@ -13,13 +13,9 @@ class StageEntity < Grape::Entity
     if: ->(_, opts) { opts[:grouped] },
     with: JobGroupEntity
 
-  expose :latest_statuses, if: ->(_, opts) { opts[:details] }, with: Ci::JobEntity do |_stage|
-    latest_statuses
-  end
+  expose :ordered_latest_statuses, as: :latest_statuses, if: ->(_, opts) { opts[:details] }, with: Ci::JobEntity
 
-  expose :retried, if: ->(_, opts) { opts[:retried] }, with: Ci::JobEntity do |_stage|
-    retried_statuses
-  end
+  expose :ordered_retried_statuses, as: :retried, if: ->(_, opts) { opts[:retried] }, with: Ci::JobEntity
 
   expose :detailed_status, as: :status, with: DetailedStatusEntity
 
@@ -45,32 +41,4 @@ class StageEntity < Grape::Entity
   def detailed_status
     stage.detailed_status(request.current_user)
   end
-
-  def latest_statuses
-    Ci::HasStatus::ORDERED_STATUSES.flat_map do |ordered_status|
-      grouped_statuses.fetch(ordered_status, [])
-    end
-  end
-
-  def retried_statuses
-    Ci::HasStatus::ORDERED_STATUSES.flat_map do |ordered_status|
-      grouped_retried_statuses.fetch(ordered_status, [])
-    end
-  end
-
-  def grouped_statuses
-    @grouped_statuses ||= preload_metadata(stage.statuses.latest_ordered).group_by(&:status)
-  end
-
-  def grouped_retried_statuses
-    @grouped_retried_statuses ||= preload_metadata(stage.statuses.retried_ordered).group_by(&:status)
-  end
-
-  def preload_metadata(statuses)
-    relations = [:metadata, :pipeline, { downstream_pipeline: [:user, { project: [:route, { namespace: :route }] }] }]
-
-    Preloaders::CommitStatusPreloader.new(statuses).execute(relations)
-
-    statuses
-  end
 end
diff --git a/app/services/ci/partitions/setup_default_service.rb b/app/services/ci/partitions/setup_default_service.rb
index 133d4efa506..e620ecf8a75 100644
--- a/app/services/ci/partitions/setup_default_service.rb
+++ b/app/services/ci/partitions/setup_default_service.rb
@@ -10,7 +10,6 @@ module Ci
       ].freeze
 
       def execute
-        return unless Feature.enabled?(:ci_partitioning_first_records, :instance)
        return if Ci::Partition.current
 
         setup_default_partitions
@@ -31,7 +30,9 @@ module Ci
       end
 
       def setup_current_partition
-        Ci::Partition.find(Ci::Pipeline.current_partition_value).update!(status: Ci::Partition.statuses[:current])
+        Ci::Partition
+          .find(Ci::Pipeline.current_partition_value)
+          .update!(status: Ci::Partition.statuses[:current])
       end
     end
   end
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
index cd554f1055b..58da5195c44 100644
--- a/app/services/snippets/create_service.rb
+++ b/app/services/snippets/create_service.rb
@@ -124,5 +124,9 @@ module Snippets
     def restricted_files_actions
       :create
     end
+
+    def commit_attrs(snippet, msg)
+      super.merge(skip_target_sha: true)
+    end
   end
 end
diff --git a/app/views/discussions/_notes.html.haml b/app/views/discussions/_notes.html.haml
index 3f3468b1503..40d87ed7e2c 100644
--- a/app/views/discussions/_notes.html.haml
+++ b/app/views/discussions/_notes.html.haml
@@ -22,7 +22,7 @@
 - elsif !current_user
.disabled-comment.gl-text-center.gl-text-secondary Please - = link_to "register", new_session_path(:user, redirect_to_referer: 'yes') + = link_to _("register"), new_session_path(:user, redirect_to_referer: 'yes') or - = link_to "sign in", new_session_path(:user, redirect_to_referer: 'yes') + = link_to _("sign in"), new_session_path(:user, redirect_to_referer: 'yes') to reply diff --git a/app/views/projects/blob/viewers/_route_map.html.haml b/app/views/projects/blob/viewers/_route_map.html.haml index 64122b4dcd4..22e62e4e185 100644 --- a/app/views/projects/blob/viewers/_route_map.html.haml +++ b/app/views/projects/blob/viewers/_route_map.html.haml @@ -6,4 +6,4 @@ This Route Map is invalid: = viewer.validation_message -= link_to 'Learn more', help_page_path('ci/environments/index', anchor: 'go-from-source-files-to-public-pages') += link_to _('Learn more'), help_page_path('ci/environments/index', anchor: 'go-from-source-files-to-public-pages') diff --git a/app/views/projects/blob/viewers/_route_map_loading.html.haml b/app/views/projects/blob/viewers/_route_map_loading.html.haml index 0e5816a56af..b681462c5c7 100644 --- a/app/views/projects/blob/viewers/_route_map_loading.html.haml +++ b/app/views/projects/blob/viewers/_route_map_loading.html.haml @@ -1,4 +1,4 @@ = gl_loading_icon(inline: true, css_class: "gl-mr-1") Validating Route Map… -= link_to 'Learn more', help_page_path('ci/environments/index', anchor: 'go-from-source-files-to-public-pages') += link_to _('Learn more'), help_page_path('ci/environments/index', anchor: 'go-from-source-files-to-public-pages') diff --git a/config/feature_flags/gitlab_com_derisk/ci_partitioning_first_records.yml b/config/feature_flags/gitlab_com_derisk/validate_target_sha_in_user_commit_files.yml similarity index 66% rename from config/feature_flags/gitlab_com_derisk/ci_partitioning_first_records.yml rename to config/feature_flags/gitlab_com_derisk/validate_target_sha_in_user_commit_files.yml index d7a6f8d6e71..32d49121e88 100644 --- a/config/feature_flags/gitlab_com_derisk/ci_partitioning_first_records.yml +++ b/config/feature_flags/gitlab_com_derisk/validate_target_sha_in_user_commit_files.yml @@ -1,9 +1,9 @@ --- -name: ci_partitioning_first_records -feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/454974 -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148944 -rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/457866 -milestone: '17.0' -group: group::pipeline execution +name: validate_target_sha_in_user_commit_files +feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/384017 +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/161981 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/479424 +milestone: '17.4' +group: group::source code type: gitlab_com_derisk default_enabled: false diff --git a/config/gitlab_loose_foreign_keys.yml b/config/gitlab_loose_foreign_keys.yml index edb07b8a9b5..1f04fb6aea3 100644 --- a/config/gitlab_loose_foreign_keys.yml +++ b/config/gitlab_loose_foreign_keys.yml @@ -342,6 +342,16 @@ p_ci_job_artifacts: - table: projects column: project_id on_delete: async_delete +p_ci_pipelines: + - table: merge_requests + column: merge_request_id + on_delete: async_delete + - table: users + column: user_id + on_delete: async_nullify + - table: projects + column: project_id + on_delete: async_delete p_ci_runner_machine_builds: - table: ci_runner_machines column: runner_machine_id @@ -491,6 +501,10 @@ vulnerability_occurrences: - table: 
ci_pipelines column: latest_pipeline_id on_delete: async_nullify +vulnerability_remediations: + - table: projects + column: project_id + on_delete: async_delete vulnerability_scanners: - table: projects column: project_id diff --git a/config/known_invalid_graphql_queries.yml b/config/known_invalid_graphql_queries.yml index 43b5a685886..13cb0b21990 100644 --- a/config/known_invalid_graphql_queries.yml +++ b/config/known_invalid_graphql_queries.yml @@ -3,5 +3,4 @@ filenames: - app/assets/javascripts/integrations/beyond_identity/graphql/mutations/create_beyond_identity_exclusion.mutation.graphql - app/assets/javascripts/integrations/beyond_identity/graphql/mutations/delete_beyond_identity_exclusion.mutation.graphql - app/assets/javascripts/integrations/beyond_identity/graphql/queries/beyond_identity_exclusions.query.graphql - - ee/app/assets/javascripts/oncall_schedules/graphql/mutations/update_oncall_schedule_rotation.mutation.graphql - - app/assets/javascripts/security_configuration/graphql/set_container_scanning_for_registry.graphql \ No newline at end of file + - ee/app/assets/javascripts/oncall_schedules/graphql/mutations/update_oncall_schedule_rotation.mutation.graphql \ No newline at end of file diff --git a/danger/tailwindcss/Dangerfile b/danger/tailwindcss/Dangerfile index bd78a3423b1..f32e6d0d7ca 100644 --- a/danger/tailwindcss/Dangerfile +++ b/danger/tailwindcss/Dangerfile @@ -29,66 +29,6 @@ module Danger MARKDOWN end - def report_legacy_utils_usage - `yarn tailwindcss:build` - - legacy_utils = File - .read("./config/helpers/tailwind/css_in_js.js") - .scan(/'(\.[^\']*)'/).flatten.map do |legacy_util| - legacy_util.gsub('.', 'gl-').gsub('\\\\!', '!') - end - - files_with_legacy_utils = @helper.all_changed_files.flat_map do |file| - next [] if file.end_with?('tailwind_equivalents.json') - - diff = @git.diff_for_file(file) - - # When a file is just moved around it appears in the changed files list - # but the diff is empty so we are skipping it. - next [] if diff.nil? - - used_legacy_utils = diff.patch.each_line.flat_map do |line| - next [] unless line.start_with?('+') - - legacy_utils.select do |legacy_util| - legacy_util_regex = if legacy_util.end_with?('!') - /#{legacy_util.gsub('\\\\!', '!')}/ - else - /#{legacy_util}(?!!)/ - end - - line.match?(legacy_util_regex) - end - end - - next [] if used_legacy_utils.empty? - - [[file, used_legacy_utils]] - end - - return "" if files_with_legacy_utils.empty? - - <<~MARKDOWN - ### Legacy CSS utility classes - - The following files contain legacy CSS utility classes: - #{format_files_with_legacy_utils_list(files_with_legacy_utils)} - - We are in the process of migrating our CSS utility classes to [Tailwind CSS](https://tailwindcss.com/). - The above CSS utility classes do not comply with Tailwind CSS naming conventions. - Please use the Tailwind CSS equivalent if it is available. - For more information see [Tailwind CSS developer documentation](https://docs.gitlab.com/ee/development/fe_guide/style/scss.html#tailwind-css). - - If the Tailwind CSS equivalent is not available, it is okay to use the legacy CSS utility class for now. - The Tailwind CSS equivalent will be made available when the corresponding migration issue - in [&13521](https://gitlab.com/groups/gitlab-org/-/epics/13521) is completed. - - If a legacy CSS utility class is listed above but you did not change it in this MR it is okay to leave for now. 
- If it is a small or simple MR, feel free to leave the code better than you found it and migrate those - legacy CSS utility classes to Tailwind CSS. - MARKDOWN - end - private def frontend_tailwindy_files(files) @@ -122,30 +62,11 @@ module Danger "- `#{file}`" end.join("\n") end - - def format_files_with_legacy_utils_list(files) - # rubocop:disable Gitlab/Json -- we are outside of the GitLab Rails context - # and therefore do not have access to the dependencies required for - # Gitlab::Json.parse to work. - tailwind_equivalents = JSON.parse( - File.read("./scripts/frontend/tailwind_equivalents.json") - ) - # rubocop:enable Gitlab/Json - - files.map do |file, legacy_utils| - legacy_utils_bullet_points = legacy_utils.map do |legacy_util| - tailwind_equivalent = tailwind_equivalents[legacy_util] || "Not available" - " - `#{legacy_util}` - Tailwind CSS equivalent: `#{tailwind_equivalent}`" - end.join("\n") - - "- `#{file}`\n" + legacy_utils_bullet_points - end.join("\n") - end end end danger_tailwind = Danger::Tailwind.new(helper, git) -report = danger_tailwind.report_interpolated_utils + danger_tailwind.report_legacy_utils_usage +report = danger_tailwind.report_interpolated_utils unless report.empty? markdown <<~MSG diff --git a/db/docs/dast_site_profiles_builds.yml b/db/docs/dast_site_profiles_builds.yml index 23c232c9b42..7873ac71d4d 100644 --- a/db/docs/dast_site_profiles_builds.yml +++ b/db/docs/dast_site_profiles_builds.yml @@ -7,7 +7,7 @@ feature_categories: description: Join table between DAST Site Profiles and CI Builds introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63362 milestone: '14.1' -gitlab_schema: gitlab_main_cell +gitlab_schema: gitlab_sec allow_cross_foreign_keys: - gitlab_main_clusterwide desired_sharding_key: diff --git a/db/docs/p_ci_pipelines.yml b/db/docs/p_ci_pipelines.yml new file mode 100644 index 00000000000..5488e50b6b0 --- /dev/null +++ b/db/docs/p_ci_pipelines.yml @@ -0,0 +1,12 @@ +--- +table_name: p_ci_pipelines +classes: +- Ci::Pipeline +feature_categories: +- continuous_integration +description: Routing table for ci_pipelines +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163575 +milestone: '17.4' +gitlab_schema: gitlab_ci +sharding_key: + project_id: projects diff --git a/db/docs/vulnerability_remediations.yml b/db/docs/vulnerability_remediations.yml index ad57fcbd4b0..67bbe768eb5 100644 --- a/db/docs/vulnerability_remediations.yml +++ b/db/docs/vulnerability_remediations.yml @@ -8,8 +8,6 @@ feature_categories: description: Stores remediation information, such as diffs, for a given vulnerability introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47166 milestone: '13.7' -gitlab_schema: gitlab_main_cell -allow_cross_foreign_keys: -- gitlab_main_clusterwide +gitlab_schema: gitlab_sec sharding_key: project_id: projects diff --git a/db/post_migrate/20240815125740_remove_projects_vulnerability_remediations_project_id_fk.rb b/db/post_migrate/20240815125740_remove_projects_vulnerability_remediations_project_id_fk.rb new file mode 100644 index 00000000000..600b067eb06 --- /dev/null +++ b/db/post_migrate/20240815125740_remove_projects_vulnerability_remediations_project_id_fk.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +class RemoveProjectsVulnerabilityRemediationsProjectIdFk < Gitlab::Database::Migration[2.2] + milestone '17.4' + disable_ddl_transaction! 
+
+  FOREIGN_KEY_NAME = "fk_fc61a535a0"
+
+  def up
+    with_lock_retries do
+      remove_foreign_key_if_exists(:vulnerability_remediations, :projects,
+        name: FOREIGN_KEY_NAME, reverse_lock_order: true)
+    end
+  end
+
+  def down
+    add_concurrent_foreign_key(:vulnerability_remediations, :projects,
+      name: FOREIGN_KEY_NAME, column: :project_id,
+      target_column: :id, on_delete: :cascade)
+  end
+end
diff --git a/db/post_migrate/20240825014049_drop_old_unique_index_for_ci_pipelines.rb b/db/post_migrate/20240825014049_drop_old_unique_index_for_ci_pipelines.rb
new file mode 100644
index 00000000000..76b260288df
--- /dev/null
+++ b/db/post_migrate/20240825014049_drop_old_unique_index_for_ci_pipelines.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class DropOldUniqueIndexForCiPipelines < Gitlab::Database::Migration[2.2]
+  milestone '17.4'
+  disable_ddl_transaction!
+
+  TABLE_NAME = :ci_pipelines
+  COLUMN_NAMES = [:project_id, :iid]
+  INDEX_NAME = :index_ci_pipelines_on_project_id_and_iid
+  WHERE_CLAUSE = 'iid IS NOT NULL'
+
+  def up
+    remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME)
+  end
+
+  def down
+    add_concurrent_index(
+      TABLE_NAME, COLUMN_NAMES,
+      unique: true, name: INDEX_NAME, where: WHERE_CLAUSE
+    )
+  end
+end
diff --git a/db/post_migrate/20240825050223_validate_partitioning_constraint_for_ci_pipelines.rb b/db/post_migrate/20240825050223_validate_partitioning_constraint_for_ci_pipelines.rb
new file mode 100644
index 00000000000..6e566b5bd26
--- /dev/null
+++ b/db/post_migrate/20240825050223_validate_partitioning_constraint_for_ci_pipelines.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class ValidatePartitioningConstraintForCiPipelines < Gitlab::Database::Migration[2.2]
+  milestone '17.4'
+
+  TABLE_NAME = :ci_pipelines
+  CONSTRAINT_NAME = :partitioning_constraint
+
+  def up
+    validate_check_constraint(TABLE_NAME, CONSTRAINT_NAME)
+  end
+
+  def down
+    # no-op
+  end
+end
diff --git a/db/post_migrate/20240825055929_convert_ci_pipelines_to_list_partitioning.rb b/db/post_migrate/20240825055929_convert_ci_pipelines_to_list_partitioning.rb
new file mode 100644
index 00000000000..6234811ea6d
--- /dev/null
+++ b/db/post_migrate/20240825055929_convert_ci_pipelines_to_list_partitioning.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class ConvertCiPipelinesToListPartitioning < Gitlab::Database::Migration[2.2]
+  include Gitlab::Database::PartitioningMigrationHelpers::TableManagementHelpers
+
+  milestone '17.4'
+  disable_ddl_transaction!
+ + TABLE_NAME = :ci_pipelines + PARENT_TABLE_NAME = :p_ci_pipelines + FIRST_PARTITION = (100..102).to_a + PARTITION_COLUMN = :partition_id + + def up + convert_table_to_first_list_partition( + table_name: TABLE_NAME, + partitioning_column: PARTITION_COLUMN, + parent_table_name: PARENT_TABLE_NAME, + initial_partitioning_value: FIRST_PARTITION + ) + end + + def down + revert_converting_table_to_first_list_partition( + table_name: TABLE_NAME, + partitioning_column: PARTITION_COLUMN, + parent_table_name: PARENT_TABLE_NAME, + initial_partitioning_value: FIRST_PARTITION + ) + end +end diff --git a/db/schema_migrations/20240815125740 b/db/schema_migrations/20240815125740 new file mode 100644 index 00000000000..72db83f5303 --- /dev/null +++ b/db/schema_migrations/20240815125740 @@ -0,0 +1 @@ +3525ce14c670e1e6ac5b095e19ce1b6a217b5e760108a1303ae31e8d239cad6a \ No newline at end of file diff --git a/db/schema_migrations/20240825014049 b/db/schema_migrations/20240825014049 new file mode 100644 index 00000000000..96fa2569c5e --- /dev/null +++ b/db/schema_migrations/20240825014049 @@ -0,0 +1 @@ +cd358444afafc4310d85fee44a8293c593be17d67986b9e26762bafd3bd23b2a \ No newline at end of file diff --git a/db/schema_migrations/20240825050223 b/db/schema_migrations/20240825050223 new file mode 100644 index 00000000000..8b972bac18d --- /dev/null +++ b/db/schema_migrations/20240825050223 @@ -0,0 +1 @@ +46690967dc9a89f2efb44c5b6d15c620143c06a346bfea88617fc6c0b2c27563 \ No newline at end of file diff --git a/db/schema_migrations/20240825055929 b/db/schema_migrations/20240825055929 new file mode 100644 index 00000000000..cb6add49c27 --- /dev/null +++ b/db/schema_migrations/20240825055929 @@ -0,0 +1 @@ +b640f7b4d5e70ba69e218c62de75f5d9ba8d0639c3d5c75329ba05f662c2212f \ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index ece8f11ca0f..70f391a317f 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -8324,7 +8324,7 @@ CREATE SEQUENCE ci_pipeline_variables_id_seq ALTER SEQUENCE ci_pipeline_variables_id_seq OWNED BY p_ci_pipeline_variables.id; -CREATE TABLE ci_pipelines ( +CREATE TABLE p_ci_pipelines ( ref character varying, sha character varying, before_sha character varying, @@ -8358,13 +8358,8 @@ CREATE TABLE ci_pipelines ( auto_canceled_by_partition_id bigint, CONSTRAINT check_2ba2a044b9 CHECK ((project_id IS NOT NULL)), CONSTRAINT check_d7e99a025e CHECK ((lock_version IS NOT NULL)) -); - -CREATE TABLE ci_pipelines_config ( - pipeline_id bigint NOT NULL, - content text NOT NULL, - partition_id bigint NOT NULL -); +) +PARTITION BY LIST (partition_id); CREATE SEQUENCE ci_pipelines_id_seq START WITH 1 @@ -8373,7 +8368,49 @@ CREATE SEQUENCE ci_pipelines_id_seq NO MAXVALUE CACHE 1; -ALTER SEQUENCE ci_pipelines_id_seq OWNED BY ci_pipelines.id; +ALTER SEQUENCE ci_pipelines_id_seq OWNED BY p_ci_pipelines.id; + +CREATE TABLE ci_pipelines ( + ref character varying, + sha character varying, + before_sha character varying, + created_at timestamp without time zone, + updated_at timestamp without time zone, + tag boolean DEFAULT false, + yaml_errors text, + committed_at timestamp without time zone, + project_id integer, + status character varying, + started_at timestamp without time zone, + finished_at timestamp without time zone, + duration integer, + user_id integer, + lock_version integer DEFAULT 0, + pipeline_schedule_id integer, + source integer, + config_source integer, + protected boolean, + failure_reason integer, + iid integer, + merge_request_id integer, + source_sha bytea, + target_sha 
bytea, + external_pull_request_id bigint, + ci_ref_id bigint, + locked smallint DEFAULT 1 NOT NULL, + partition_id bigint NOT NULL, + id bigint DEFAULT nextval('ci_pipelines_id_seq'::regclass) NOT NULL, + auto_canceled_by_id bigint, + auto_canceled_by_partition_id bigint, + CONSTRAINT check_2ba2a044b9 CHECK ((project_id IS NOT NULL)), + CONSTRAINT check_d7e99a025e CHECK ((lock_version IS NOT NULL)) +); + +CREATE TABLE ci_pipelines_config ( + pipeline_id bigint NOT NULL, + content text NOT NULL, + partition_id bigint NOT NULL +); CREATE TABLE ci_platform_metrics ( id bigint NOT NULL, @@ -21021,6 +21058,8 @@ ALTER TABLE ONLY p_ci_job_artifacts ATTACH PARTITION ci_job_artifacts FOR VALUES ALTER TABLE ONLY p_ci_pipeline_variables ATTACH PARTITION ci_pipeline_variables FOR VALUES IN ('100', '101'); +ALTER TABLE ONLY p_ci_pipelines ATTACH PARTITION ci_pipelines FOR VALUES IN ('100', '101', '102'); + ALTER TABLE ONLY p_ci_stages ATTACH PARTITION ci_stages FOR VALUES IN ('100', '101'); ALTER TABLE ONLY abuse_events ALTER COLUMN id SET DEFAULT nextval('abuse_events_id_seq'::regclass); @@ -21283,8 +21322,6 @@ ALTER TABLE ONLY ci_pipeline_schedule_variables ALTER COLUMN id SET DEFAULT next ALTER TABLE ONLY ci_pipeline_schedules ALTER COLUMN id SET DEFAULT nextval('ci_pipeline_schedules_id_seq'::regclass); -ALTER TABLE ONLY ci_pipelines ALTER COLUMN id SET DEFAULT nextval('ci_pipelines_id_seq'::regclass); - ALTER TABLE ONLY ci_platform_metrics ALTER COLUMN id SET DEFAULT nextval('ci_platform_metrics_id_seq'::regclass); ALTER TABLE ONLY ci_project_mirrors ALTER COLUMN id SET DEFAULT nextval('ci_project_mirrors_id_seq'::regclass); @@ -21797,6 +21834,8 @@ ALTER TABLE ONLY p_catalog_resource_sync_events ALTER COLUMN id SET DEFAULT next ALTER TABLE ONLY p_ci_builds_metadata ALTER COLUMN id SET DEFAULT nextval('ci_builds_metadata_id_seq'::regclass); +ALTER TABLE ONLY p_ci_pipelines ALTER COLUMN id SET DEFAULT nextval('ci_pipelines_id_seq'::regclass); + ALTER TABLE ONLY packages_build_infos ALTER COLUMN id SET DEFAULT nextval('packages_build_infos_id_seq'::regclass); ALTER TABLE ONLY packages_composer_cache_files ALTER COLUMN id SET DEFAULT nextval('packages_composer_cache_files_id_seq'::regclass); @@ -23290,6 +23329,9 @@ ALTER TABLE ONLY ci_pipeline_variables ALTER TABLE ONLY ci_pipelines_config ADD CONSTRAINT ci_pipelines_config_pkey PRIMARY KEY (pipeline_id); +ALTER TABLE ONLY p_ci_pipelines + ADD CONSTRAINT p_ci_pipelines_pkey PRIMARY KEY (id, partition_id); + ALTER TABLE ONLY ci_pipelines ADD CONSTRAINT ci_pipelines_pkey PRIMARY KEY (id, partition_id); @@ -24352,9 +24394,6 @@ ALTER TABLE ONLY pages_domain_acme_orders ALTER TABLE ONLY pages_domains ADD CONSTRAINT pages_domains_pkey PRIMARY KEY (id); -ALTER TABLE ci_pipelines - ADD CONSTRAINT partitioning_constraint CHECK ((partition_id = ANY (ARRAY[(100)::bigint, (101)::bigint, (102)::bigint]))) NOT VALID; - ALTER TABLE ONLY path_locks ADD CONSTRAINT path_locks_pkey PRIMARY KEY (id); @@ -26377,6 +26416,8 @@ CREATE UNIQUE INDEX p_ci_job_artifacts_job_id_file_type_partition_id_idx ON ONLY CREATE UNIQUE INDEX idx_ci_job_artifacts_on_job_id_file_type_and_partition_id_uniq ON ci_job_artifacts USING btree (job_id, file_type, partition_id); +CREATE INDEX p_ci_pipelines_ci_ref_id_id_idx ON ONLY p_ci_pipelines USING btree (ci_ref_id, id) WHERE (locked = 1); + CREATE INDEX idx_ci_pipelines_artifacts_locked ON ci_pipelines USING btree (ci_ref_id, id) WHERE (locked = 1); CREATE INDEX idx_ci_running_builds_on_runner_type_and_owner_xid_and_id ON ci_running_builds 
USING btree (runner_type, runner_owner_namespace_xid, runner_id); @@ -27319,48 +27360,88 @@ CREATE INDEX index_ci_pipeline_schedules_on_owner_id_and_id_and_active ON ci_pip CREATE INDEX index_ci_pipeline_schedules_on_project_id ON ci_pipeline_schedules USING btree (project_id); +CREATE INDEX p_ci_pipelines_id_idx ON ONLY p_ci_pipelines USING btree (id) WHERE (source = 13); + CREATE INDEX index_ci_pipelines_for_ondemand_dast_scans ON ci_pipelines USING btree (id) WHERE (source = 13); +CREATE INDEX p_ci_pipelines_auto_canceled_by_id_idx ON ONLY p_ci_pipelines USING btree (auto_canceled_by_id); + CREATE INDEX index_ci_pipelines_on_auto_canceled_by_id ON ci_pipelines USING btree (auto_canceled_by_id); +CREATE INDEX p_ci_pipelines_ci_ref_id_id_source_status_idx ON ONLY p_ci_pipelines USING btree (ci_ref_id, id DESC, source, status) WHERE (ci_ref_id IS NOT NULL); + CREATE INDEX index_ci_pipelines_on_ci_ref_id_and_more ON ci_pipelines USING btree (ci_ref_id, id DESC, source, status) WHERE (ci_ref_id IS NOT NULL); +CREATE INDEX p_ci_pipelines_external_pull_request_id_idx ON ONLY p_ci_pipelines USING btree (external_pull_request_id) WHERE (external_pull_request_id IS NOT NULL); + CREATE INDEX index_ci_pipelines_on_external_pull_request_id ON ci_pipelines USING btree (external_pull_request_id) WHERE (external_pull_request_id IS NOT NULL); +CREATE INDEX p_ci_pipelines_merge_request_id_idx ON ONLY p_ci_pipelines USING btree (merge_request_id) WHERE (merge_request_id IS NOT NULL); + CREATE INDEX index_ci_pipelines_on_merge_request_id ON ci_pipelines USING btree (merge_request_id) WHERE (merge_request_id IS NOT NULL); +CREATE INDEX p_ci_pipelines_pipeline_schedule_id_id_idx ON ONLY p_ci_pipelines USING btree (pipeline_schedule_id, id); + CREATE INDEX index_ci_pipelines_on_pipeline_schedule_id_and_id ON ci_pipelines USING btree (pipeline_schedule_id, id); +CREATE INDEX p_ci_pipelines_project_id_id_idx ON ONLY p_ci_pipelines USING btree (project_id, id DESC); + CREATE INDEX index_ci_pipelines_on_project_id_and_id_desc ON ci_pipelines USING btree (project_id, id DESC); -CREATE UNIQUE INDEX index_ci_pipelines_on_project_id_and_iid ON ci_pipelines USING btree (project_id, iid) WHERE (iid IS NOT NULL); +CREATE UNIQUE INDEX p_ci_pipelines_project_id_iid_partition_id_idx ON ONLY p_ci_pipelines USING btree (project_id, iid, partition_id) WHERE (iid IS NOT NULL); CREATE UNIQUE INDEX index_ci_pipelines_on_project_id_and_iid_and_partition_id ON ci_pipelines USING btree (project_id, iid, partition_id) WHERE (iid IS NOT NULL); +CREATE INDEX p_ci_pipelines_project_id_ref_status_id_idx ON ONLY p_ci_pipelines USING btree (project_id, ref, status, id); + CREATE INDEX index_ci_pipelines_on_project_id_and_ref_and_status_and_id ON ci_pipelines USING btree (project_id, ref, status, id); +CREATE INDEX p_ci_pipelines_project_id_sha_idx ON ONLY p_ci_pipelines USING btree (project_id, sha); + CREATE INDEX index_ci_pipelines_on_project_id_and_sha ON ci_pipelines USING btree (project_id, sha); +CREATE INDEX p_ci_pipelines_project_id_source_idx ON ONLY p_ci_pipelines USING btree (project_id, source); + CREATE INDEX index_ci_pipelines_on_project_id_and_source ON ci_pipelines USING btree (project_id, source); +CREATE INDEX p_ci_pipelines_project_id_status_config_source_idx ON ONLY p_ci_pipelines USING btree (project_id, status, config_source); + CREATE INDEX index_ci_pipelines_on_project_id_and_status_and_config_source ON ci_pipelines USING btree (project_id, status, config_source); +CREATE INDEX 
p_ci_pipelines_project_id_status_created_at_idx ON ONLY p_ci_pipelines USING btree (project_id, status, created_at); + CREATE INDEX index_ci_pipelines_on_project_id_and_status_and_created_at ON ci_pipelines USING btree (project_id, status, created_at); +CREATE INDEX p_ci_pipelines_project_id_status_updated_at_idx ON ONLY p_ci_pipelines USING btree (project_id, status, updated_at); + CREATE INDEX index_ci_pipelines_on_project_id_and_status_and_updated_at ON ci_pipelines USING btree (project_id, status, updated_at); +CREATE INDEX p_ci_pipelines_project_id_user_id_status_ref_idx ON ONLY p_ci_pipelines USING btree (project_id, user_id, status, ref) WHERE (source <> 12); + CREATE INDEX index_ci_pipelines_on_project_id_and_user_id_and_status_and_ref ON ci_pipelines USING btree (project_id, user_id, status, ref) WHERE (source <> 12); +CREATE INDEX p_ci_pipelines_project_id_ref_id_idx ON ONLY p_ci_pipelines USING btree (project_id, ref, id DESC); + CREATE INDEX index_ci_pipelines_on_project_idandrefandiddesc ON ci_pipelines USING btree (project_id, ref, id DESC); +CREATE INDEX p_ci_pipelines_status_id_idx ON ONLY p_ci_pipelines USING btree (status, id); + CREATE INDEX index_ci_pipelines_on_status_and_id ON ci_pipelines USING btree (status, id); +CREATE INDEX p_ci_pipelines_user_id_created_at_config_source_idx ON ONLY p_ci_pipelines USING btree (user_id, created_at, config_source); + CREATE INDEX index_ci_pipelines_on_user_id_and_created_at_and_config_source ON ci_pipelines USING btree (user_id, created_at, config_source); +CREATE INDEX p_ci_pipelines_user_id_created_at_source_idx ON ONLY p_ci_pipelines USING btree (user_id, created_at, source); + CREATE INDEX index_ci_pipelines_on_user_id_and_created_at_and_source ON ci_pipelines USING btree (user_id, created_at, source); +CREATE INDEX p_ci_pipelines_user_id_id_idx ON ONLY p_ci_pipelines USING btree (user_id, id) WHERE ((status)::text = ANY (ARRAY[('running'::character varying)::text, ('waiting_for_resource'::character varying)::text, ('preparing'::character varying)::text, ('pending'::character varying)::text, ('created'::character varying)::text, ('scheduled'::character varying)::text])); + CREATE INDEX index_ci_pipelines_on_user_id_and_id_and_cancelable_status ON ci_pipelines USING btree (user_id, id) WHERE ((status)::text = ANY (ARRAY[('running'::character varying)::text, ('waiting_for_resource'::character varying)::text, ('preparing'::character varying)::text, ('pending'::character varying)::text, ('created'::character varying)::text, ('scheduled'::character varying)::text])); +CREATE INDEX p_ci_pipelines_user_id_id_idx1 ON ONLY p_ci_pipelines USING btree (user_id, id DESC) WHERE (failure_reason = 3); + CREATE INDEX index_ci_pipelines_on_user_id_and_id_desc_and_user_not_verified ON ci_pipelines USING btree (user_id, id DESC) WHERE (failure_reason = 3); CREATE INDEX index_ci_project_mirrors_on_namespace_id ON ci_project_mirrors USING btree (namespace_id); @@ -32333,10 +32414,14 @@ ALTER INDEX p_ci_job_artifacts_pkey ATTACH PARTITION ci_job_artifacts_pkey; ALTER INDEX p_ci_pipeline_variables_pkey ATTACH PARTITION ci_pipeline_variables_pkey; +ALTER INDEX p_ci_pipelines_pkey ATTACH PARTITION ci_pipelines_pkey; + ALTER INDEX p_ci_stages_pkey ATTACH PARTITION ci_stages_pkey; ALTER INDEX p_ci_job_artifacts_job_id_file_type_partition_id_idx ATTACH PARTITION idx_ci_job_artifacts_on_job_id_file_type_and_partition_id_uniq; +ALTER INDEX p_ci_pipelines_ci_ref_id_id_idx ATTACH PARTITION idx_ci_pipelines_artifacts_locked; + ALTER INDEX 
index_p_ci_builds_on_execution_config_id ATTACH PARTITION index_0928d9f200; ALTER INDEX p_ci_builds_metadata_build_id_idx ATTACH PARTITION index_ci_builds_metadata_on_build_id_and_has_exposed_artifacts; @@ -32407,6 +32492,48 @@ ALTER INDEX p_ci_job_artifacts_project_id_id_idx1 ATTACH PARTITION index_ci_job_ ALTER INDEX p_ci_job_artifacts_project_id_idx1 ATTACH PARTITION index_ci_job_artifacts_on_project_id_for_security_reports; +ALTER INDEX p_ci_pipelines_id_idx ATTACH PARTITION index_ci_pipelines_for_ondemand_dast_scans; + +ALTER INDEX p_ci_pipelines_auto_canceled_by_id_idx ATTACH PARTITION index_ci_pipelines_on_auto_canceled_by_id; + +ALTER INDEX p_ci_pipelines_ci_ref_id_id_source_status_idx ATTACH PARTITION index_ci_pipelines_on_ci_ref_id_and_more; + +ALTER INDEX p_ci_pipelines_external_pull_request_id_idx ATTACH PARTITION index_ci_pipelines_on_external_pull_request_id; + +ALTER INDEX p_ci_pipelines_merge_request_id_idx ATTACH PARTITION index_ci_pipelines_on_merge_request_id; + +ALTER INDEX p_ci_pipelines_pipeline_schedule_id_id_idx ATTACH PARTITION index_ci_pipelines_on_pipeline_schedule_id_and_id; + +ALTER INDEX p_ci_pipelines_project_id_id_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_id_desc; + +ALTER INDEX p_ci_pipelines_project_id_iid_partition_id_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_iid_and_partition_id; + +ALTER INDEX p_ci_pipelines_project_id_ref_status_id_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_ref_and_status_and_id; + +ALTER INDEX p_ci_pipelines_project_id_sha_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_sha; + +ALTER INDEX p_ci_pipelines_project_id_source_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_source; + +ALTER INDEX p_ci_pipelines_project_id_status_config_source_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_status_and_config_source; + +ALTER INDEX p_ci_pipelines_project_id_status_created_at_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_status_and_created_at; + +ALTER INDEX p_ci_pipelines_project_id_status_updated_at_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_status_and_updated_at; + +ALTER INDEX p_ci_pipelines_project_id_user_id_status_ref_idx ATTACH PARTITION index_ci_pipelines_on_project_id_and_user_id_and_status_and_ref; + +ALTER INDEX p_ci_pipelines_project_id_ref_id_idx ATTACH PARTITION index_ci_pipelines_on_project_idandrefandiddesc; + +ALTER INDEX p_ci_pipelines_status_id_idx ATTACH PARTITION index_ci_pipelines_on_status_and_id; + +ALTER INDEX p_ci_pipelines_user_id_created_at_config_source_idx ATTACH PARTITION index_ci_pipelines_on_user_id_and_created_at_and_config_source; + +ALTER INDEX p_ci_pipelines_user_id_created_at_source_idx ATTACH PARTITION index_ci_pipelines_on_user_id_and_created_at_and_source; + +ALTER INDEX p_ci_pipelines_user_id_id_idx ATTACH PARTITION index_ci_pipelines_on_user_id_and_id_and_cancelable_status; + +ALTER INDEX p_ci_pipelines_user_id_id_idx1 ATTACH PARTITION index_ci_pipelines_on_user_id_and_id_desc_and_user_not_verified; + ALTER INDEX p_ci_stages_pipeline_id_id_idx ATTACH PARTITION index_ci_stages_on_pipeline_id_and_id; ALTER INDEX p_ci_stages_pipeline_id_position_idx ATTACH PARTITION index_ci_stages_on_pipeline_id_and_position; @@ -32467,6 +32594,8 @@ CREATE TRIGGER organizations_loose_fk_trigger AFTER DELETE ON organizations REFE CREATE TRIGGER p_ci_builds_loose_fk_trigger AFTER DELETE ON p_ci_builds REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION 
insert_into_loose_foreign_keys_deleted_records(); +CREATE TRIGGER p_ci_pipelines_loose_fk_trigger AFTER DELETE ON p_ci_pipelines REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records(); + CREATE TRIGGER plans_loose_fk_trigger AFTER DELETE ON plans REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records(); CREATE TRIGGER prevent_delete_of_default_organization_before_destroy BEFORE DELETE ON organizations FOR EACH ROW EXECUTE FUNCTION prevent_delete_of_default_organization(); @@ -32880,7 +33009,7 @@ ALTER TABLE ONLY project_features ALTER TABLE ONLY abuse_report_events ADD CONSTRAINT fk_18c774c06b FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL; -ALTER TABLE ONLY ci_pipelines +ALTER TABLE p_ci_pipelines ADD CONSTRAINT fk_190998ef09 FOREIGN KEY (external_pull_request_id) REFERENCES external_pull_requests(id) ON DELETE SET NULL; ALTER TABLE ONLY analytics_devops_adoption_segments @@ -33096,7 +33225,7 @@ ALTER TABLE ONLY bulk_import_export_uploads ALTER TABLE ONLY compliance_framework_security_policies ADD CONSTRAINT fk_3ce58167f1 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE; -ALTER TABLE ONLY ci_pipelines +ALTER TABLE p_ci_pipelines ADD CONSTRAINT fk_3d34ab2e06 FOREIGN KEY (pipeline_schedule_id) REFERENCES ci_pipeline_schedules(id) ON DELETE SET NULL; ALTER TABLE ONLY scan_result_policy_violations @@ -33990,7 +34119,7 @@ ALTER TABLE ONLY user_achievements ALTER TABLE ONLY metrics_users_starred_dashboards ADD CONSTRAINT fk_d76a2b9a8c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; -ALTER TABLE ONLY ci_pipelines +ALTER TABLE p_ci_pipelines ADD CONSTRAINT fk_d80e161c54 FOREIGN KEY (ci_ref_id) REFERENCES ci_refs(id) ON DELETE SET NULL; ALTER TABLE ONLY upcoming_reconciliations @@ -34230,9 +34359,6 @@ ALTER TABLE ONLY agent_group_authorizations ALTER TABLE ONLY system_note_metadata ADD CONSTRAINT fk_fbd87415c9 FOREIGN KEY (description_version_id) REFERENCES description_versions(id) ON DELETE SET NULL; -ALTER TABLE ONLY vulnerability_remediations - ADD CONSTRAINT fk_fc61a535a0 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; - ALTER TABLE ONLY work_item_dates_sources ADD CONSTRAINT fk_fc7bc5e687 FOREIGN KEY (due_date_sourcing_milestone_id) REFERENCES milestones(id) ON DELETE SET NULL; diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md index 4df7198a507..ee568c45957 100644 --- a/doc/api/graphql/reference/index.md +++ b/doc/api/graphql/reference/index.md @@ -1610,6 +1610,7 @@ Input type: `AiActionInput` | `generateCubeQuery` | [`AiGenerateCubeQueryInput`](#aigeneratecubequeryinput) | Input for generate_cube_query AI action. | | `generateDescription` | [`AiGenerateDescriptionInput`](#aigeneratedescriptioninput) | Input for generate_description AI action. | | `platformOrigin` | [`String`](#string) | Specifies the origin platform of the request. | +| `projectId` | [`ProjectID`](#projectid) | Global ID of the project the user is acting on. | | `resolveVulnerability` | [`AiResolveVulnerabilityInput`](#airesolvevulnerabilityinput) | Input for resolve_vulnerability AI action. | | `summarizeComments` | [`AiSummarizeCommentsInput`](#aisummarizecommentsinput) | Input for summarize_comments AI action. | | `summarizeNewMergeRequest` | [`AiSummarizeNewMergeRequestInput`](#aisummarizenewmergerequestinput) | Input for summarize_new_merge_request AI action. 
| diff --git a/doc/api/group_service_accounts.md b/doc/api/group_service_accounts.md index 394887b0741..35ac5d31193 100644 --- a/doc/api/group_service_accounts.md +++ b/doc/api/group_service_accounts.md @@ -12,14 +12,14 @@ DETAILS: Interact with [service accounts](../user/profile/service_accounts.md) by using the REST API. +Prerequisites: + +- You must be an administrator of the self-managed instance, or have the Owner role for the GitLab.com group. + ## List service account users > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/416729) in GitLab 17.1. -Prerequisites: - -- You must be an administrator of the self-managed instance, or have the Owner role for the group. - Lists all service account users that are provisioned by group. This function takes pagination parameters `page` and `per_page` to restrict the list of users. diff --git a/doc/security/webhooks.md b/doc/security/webhooks.md index e887e60c03c..11212c4bb20 100644 --- a/doc/security/webhooks.md +++ b/doc/security/webhooks.md @@ -192,6 +192,6 @@ This error can occur when outbound requests to the GitLab cloud server are not a 1. Add `https://cloud.gitlab.com:443` to the [allowlist](#allow-outbound-requests-to-certain-ip-addresses-and-domains). 1. Select **Save Changes**. -1. After GitLab has access to the [cloud server](../user/ai_features.md), [manually sychronize your license](../subscriptions/self_managed/index.md#manually-synchronize-subscription-data) +1. After GitLab has access to the [cloud server](../user/ai_features.md), [manually synchronize your license](../subscriptions/self_managed/index.md#manually-synchronize-subscription-data) For more information, see the [GitLab Duo Code Suggestions troubleshooting documentation](../user/project/repository/code_suggestions/troubleshooting.md). diff --git a/doc/subscriptions/self_managed/index.md b/doc/subscriptions/self_managed/index.md index 34e1e32e7df..e702123c711 100644 --- a/doc/subscriptions/self_managed/index.md +++ b/doc/subscriptions/self_managed/index.md @@ -323,33 +323,40 @@ The **Subscription** page includes the following information: ## Export your license usage -If you are an administrator, you can export your license usage into a CSV: +Prerequisites: + +- You must be an administrator. + +You can export your license usage into a CSV file. + +This file contains the information GitLab uses to manually process +[quarterly reconciliations](../../subscriptions/quarterly_reconciliation.md) +or [renewals](#renew-your-subscription). If your instance is firewalled or an +offline environment, you must provide GitLab with this information. + +WARNING: +Do not open the license usage file. If you open the file, failures might occur when [you submit your license usage data](../../administration/license_file.md#submit-license-usage-data). 1. On the left sidebar, at the bottom, select **Admin**. 1. Select **Subscription**. 1. In the upper-right corner, select **Export license usage file**. -This file contains the information GitLab uses to manually process quarterly reconciliations or renewals. If your instance is firewalled or an offline environment, you must provide GitLab with this information. 
+### License usage file contents -The **License Usage** CSV includes the following details: +The license usage file includes the following information: - License key - Licensee email -- License start date -- License end date +- License start date (UTC) +- License end date (UTC) - Company -- Generated at (the timestamp for when the file was exported) +- Timestamp the file was generated at and exported (UTC) - Table of historical user counts for each day in the period: - - Timestamp the count was recorded + - Timestamp the count was recorded (UTC) - [Billable user](#billable-users) count -NOTES: - -- All date timestamps are displayed in UTC. -- A custom format is used for [dates](https://gitlab.com/gitlab-org/gitlab/blob/3be39f19ac3412c089be28553e6f91b681e5d739/config/initializers/date_time_formats.rb#L7) and [times](https://gitlab.com/gitlab-org/gitlab/blob/3be39f19ac3412c089be28553e6f91b681e5d739/config/initializers/date_time_formats.rb#L13) in CSV files. - -WARNING: -Do not open the license usage file. If you open the file, failures might occur when [you submit your license usage data](../../administration/license_file.md#submit-license-usage-data). +NOTE: +A custom format is used for [dates](https://gitlab.com/gitlab-org/gitlab/blob/3be39f19ac3412c089be28553e6f91b681e5d739/config/initializers/date_time_formats.rb#L7) and [times](https://gitlab.com/gitlab-org/gitlab/blob/3be39f19ac3412c089be28553e6f91b681e5d739/config/initializers/date_time_formats.rb#L13) in CSV files. ## Renew your subscription diff --git a/doc/user/application_security/vulnerability_report/pipeline.md b/doc/user/application_security/vulnerability_report/pipeline.md index 34148ef4c9b..2b894aead7d 100644 --- a/doc/user/application_security/vulnerability_report/pipeline.md +++ b/doc/user/application_security/vulnerability_report/pipeline.md @@ -125,10 +125,7 @@ This does not apply for the vulnerabilities existing on the default branch. ## Change status of findings > - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/331408) in GitLab 16.7 [with a flag](../../../administration/feature_flags.md) named `pipeline_security_dashboard_graphql`. Disabled by default. - -FLAG: -On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../../../administration/feature_flags.md) named `pipeline_security_dashboard_graphql`. -On GitLab.com and GitLab Dedicated, this feature is not available. +> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/328818) in GitLab 17.4. Feature flag `pipeline_security_dashboard_graphql` removed. To change the status of findings to **Dismiss** or **Needs triage**: diff --git a/doc/user/custom_roles.md b/doc/user/custom_roles.md index fe7abc783af..3681e16d877 100644 --- a/doc/user/custom_roles.md +++ b/doc/user/custom_roles.md @@ -175,8 +175,9 @@ If you are assigning a custom role to an existing: 1. On the left sidebar, select **Search or go to** and find your group or project. 1. Select **Manage > Members**. -1. Select the **Max role** dropdown list for the member you want to select a custom role for. -1. On the **Change role** dialog, select a different custom role. +1. In the **Max role** column, select the role for the member. The **Role details** drawer opens. +1. Using the **Role** dropdown list, select the custom role you want to assign to the member. +1. Select **Update role** to assign the role. 
### Use the API to assign a custom role @@ -225,8 +226,9 @@ To remove a custom role from a group member: 1. On the left sidebar, select **Search or go to** and find your group. 1. Select **Manage > Members**. -1. Select the **Max role** dropdown list for the member you want to remove a custom role from. -1. On the **Change role** dialog, select a default role. +1. In the **Max role** column, select the role for the member. The **Role details** drawer opens. +1. Using the **Role** dropdown list, select the default role you want to assign to the member. +1. Select **Update role** to assign the role. ### Use the API to change user role diff --git a/lib/api/usage_data.rb b/lib/api/usage_data.rb index adc64e46751..9a9ed959b7c 100644 --- a/lib/api/usage_data.rb +++ b/lib/api/usage_data.rb @@ -112,22 +112,28 @@ module API status :ok end - desc 'Track gitlab internal events' do - detail 'This feature was introduced in GitLab 16.2.' - success code: 200 - failure [ - { code: 401, message: 'Unauthorized' }, - { code: 404, message: 'Not found' } - ] - tags %w[usage_data] - end - params do - use :event_params - end - post 'track_event', urgency: :low do - process_event(params) + resource :track_event do + allow_access_with_scope :ai_workflows - status :ok + desc 'Track gitlab internal events' do + detail 'This feature was introduced in GitLab 16.2.' + success code: 200 + failure [ + { code: 401, message: 'Unauthorized' }, + { code: 404, message: 'Not found' } + ] + tags %w[usage_data] + end + + params do + use :event_params + end + + post urgency: :low do + process_event(params) + + status :ok + end end desc 'Track multiple gitlab internal events' do diff --git a/lib/gitlab/database/partitioning/list/convert_table.rb b/lib/gitlab/database/partitioning/list/convert_table.rb index fd36c0ec4a4..e8fde371fbb 100644 --- a/lib/gitlab/database/partitioning/list/convert_table.rb +++ b/lib/gitlab/database/partitioning/list/convert_table.rb @@ -214,6 +214,8 @@ module Gitlab .not_inherited .any? { |p_fk| p_fk.name == fk.name } + next if fk.referenced_table_name == table_name.to_s + migration_context.add_concurrent_foreign_key( parent_table_name, fk.referenced_table_name, diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb index 064368baf1f..09269878559 100644 --- a/lib/gitlab/git/repository.rb +++ b/lib/gitlab/git/repository.rb @@ -1014,6 +1014,7 @@ module Gitlab # @param [String] start_sha: The sha to be used as the parent of the commit. # @param [Gitlab::Git::Repository] start_repository: The repository that contains the start branch or sha. Defaults to use this repository. # @param [Boolean] force: Force update the branch. + # @param [String] target_sha: The latest sha of the target branch (optional). Used to prevent races in updates between different clients. 
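The new target_sha keyword gives callers compare-and-swap semantics: read the branch head, build the commit against it, and pass the same SHA back so Gitaly rejects the write if another client moved the branch in the meantime. A caller-side sketch follows; the branch name, action payload, and rescue list are illustrative, and the exact error class a stale SHA surfaces as depends on how wrapped_gitaly_errors maps GRPC::FailedPrecondition.

```ruby
# Illustrative use of the optional target_sha keyword on
# Gitlab::Git::Repository#commit_files; only the keyword itself comes from this change.
def update_readme(repository, user, expected_head_sha)
  repository.commit_files(
    user,
    branch_name: 'main',
    message: 'Update README',
    actions: [{ action: :update, file_path: 'README.md', content: "# Updated\n" }],
    target_sha: expected_head_sha # fail instead of silently clobbering a newer commit
  )
rescue Gitlab::Git::CommandError, GRPC::FailedPrecondition
  # The branch moved (or the SHA was invalid) between read and write:
  # re-read the head and retry, or surface a conflict to the caller.
  raise
end
```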
# @return [Gitlab::Git::OperationService::BranchUpdate] # # rubocop:disable Metrics/ParameterLists @@ -1021,12 +1022,12 @@ module Gitlab user, branch_name:, message:, actions:, author_email: nil, author_name: nil, start_branch_name: nil, start_sha: nil, start_repository: nil, - force: false, sign: true) + force: false, sign: true, target_sha: nil) wrapped_gitaly_errors do gitaly_operation_client.user_commit_files(user, branch_name, - message, actions, author_email, author_name, - start_branch_name, start_repository, force, start_sha, sign) + message, actions, author_email, author_name, start_branch_name, + start_repository, force, start_sha, sign, target_sha) end end # rubocop:enable Metrics/ParameterLists diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb index 6797890f8d0..fe352ade797 100644 --- a/lib/gitlab/gitaly_client/operation_service.rb +++ b/lib/gitlab/gitaly_client/operation_service.rb @@ -507,12 +507,12 @@ module Gitlab # rubocop:disable Metrics/ParameterLists def user_commit_files( - user, branch_name, commit_message, actions, author_email, author_name, - start_branch_name, start_repository, force = false, start_sha = nil, sign = true) + user, branch_name, commit_message, actions, author_email, author_name, start_branch_name, + start_repository, force = false, start_sha = nil, sign = true, target_sha = nil) req_enum = Enumerator.new do |y| header = user_commit_files_request_header(user, branch_name, - commit_message, actions, author_email, author_name, - start_branch_name, start_repository, force, start_sha, sign) + commit_message, actions, author_email, author_name, start_branch_name, + start_repository, force, start_sha, sign, target_sha) y.yield Gitaly::UserCommitFilesRequest.new(header: header) @@ -561,13 +561,7 @@ module Gitlab when :index_update raise Gitlab::Git::Index::IndexError, index_error_message(detailed_error.index_update) else - # Some invalid path errors are caught by Gitaly directly and returned - # as an :index_update error, while others are found by libgit2 and - # come as generic errors. We need to convert the latter as IndexErrors - # as well. - if e.to_status.details.start_with?('invalid path') - raise Gitlab::Git::Index::IndexError, e.to_status.details - end + handle_undetailed_bad_status_errors(e) raise e end @@ -614,7 +608,7 @@ module Gitlab # rubocop:disable Metrics/ParameterLists def user_commit_files_request_header( user, branch_name, commit_message, actions, author_email, author_name, - start_branch_name, start_repository, force, start_sha, sign) + start_branch_name, start_repository, force, start_sha, sign, target_sha) Gitaly::UserCommitFilesRequestHeader.new( repository: @gitaly_repo, @@ -628,6 +622,7 @@ module Gitlab force: force, start_sha: encode_binary(start_sha), sign: sign, + expected_old_oid: target_sha, timestamp: Google::Protobuf::Timestamp.new(seconds: Time.now.utc.to_i) ) end @@ -673,6 +668,18 @@ module Gitlab "Unknown error performing git operation" end end + + def handle_undetailed_bad_status_errors(error) + # Some invalid path errors are caught by Gitaly directly and returned + # as an :index_update error, while others are found by libgit2 and + # come as generic errors. We need to convert the latter as IndexErrors + # as well. 
+ if error.to_status.details.start_with?('invalid path') + raise Gitlab::Git::Index::IndexError, error.to_status.details + elsif error.is_a?(GRPC::InvalidArgument) && error.to_status.details.include?('expected old object ID') + raise Gitlab::Git::CommandError, error + end + end end end end diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 1e2ea1f4b40..f83f9bb857a 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -357,6 +357,16 @@ msgid_plural "%d layers" msgstr[0] "" msgstr[1] "" +msgid "%d matching branch" +msgid_plural "%d matching branches" +msgstr[0] "" +msgstr[1] "" + +msgid "%d matching environment" +msgid_plural "%d matching environments" +msgstr[0] "" +msgstr[1] "" + msgid "%d merge request" msgid_plural "%d merge requests" msgstr[0] "" @@ -1007,6 +1017,9 @@ msgstr "" msgid "%{label_name} was removed" msgstr "" +msgid "%{lastUsed} by" +msgstr "" + msgid "%{lessThan} 1 hour" msgstr "" @@ -1203,6 +1216,9 @@ msgid_plural "%{reviewer_names} were removed from reviewers." msgstr[0] "" msgstr[1] "" +msgid "%{rotationDate} (%{rotationPeriod})" +msgstr "" + msgid "%{rotation} has been recalculated with the remaining participants. Please review the new setup for %{rotation_link}. It is recommended that you reach out to the current on-call responder to ensure continuity of on-call coverage." msgstr "" @@ -10434,7 +10450,16 @@ msgstr "" msgid "CVE|Why Request a CVE ID?" msgstr "" -msgid "CVS|Toggle CVS" +msgid "CVS|Action unavailable" +msgstr "" + +msgid "CVS|Container Scanning for Registry is disabled" +msgstr "" + +msgid "CVS|Container Scanning for Registry is enabled" +msgstr "" + +msgid "CVS|Only a project maintainer or owner can toggle this feature." msgstr "" msgid "Cadence is not automated" @@ -45812,7 +45837,7 @@ msgstr "" msgid "Rollback" msgstr "" -msgid "Rotation schedule" +msgid "Rotation reminder" msgstr "" msgid "Ruby" @@ -48283,6 +48308,9 @@ msgstr "" msgid "SecurityConfiguration|The status of the tools only applies to the default branch and is based on the %{linkStart}latest pipeline%{linkEnd}." 
msgstr "" +msgid "SecurityConfiguration|Toggle Container Scanning for Registry" +msgstr "" + msgid "SecurityConfiguration|Toggle secret push protection" msgstr "" diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb index 2fffa762130..1c5a8e6db24 100644 --- a/spec/db/schema_spec.rb +++ b/spec/db/schema_spec.rb @@ -82,6 +82,7 @@ RSpec.describe 'Database schema', feature_category: :database do ci_pipeline_metadata: %w[partition_id], ci_pipeline_variables: %w[partition_id], ci_pipelines: %w[partition_id auto_canceled_by_partition_id], + p_ci_pipelines: %w[partition_id auto_canceled_by_partition_id auto_canceled_by_id], ci_runner_projects: %w[runner_id], ci_sources_pipelines: %w[partition_id source_partition_id source_job_id], ci_sources_projects: %w[partition_id], diff --git a/spec/frontend/security_configuration/components/container_scanning_for_registry_spec.js b/spec/frontend/security_configuration/components/container_scanning_for_registry_spec.js deleted file mode 100644 index b4c50d2990e..00000000000 --- a/spec/frontend/security_configuration/components/container_scanning_for_registry_spec.js +++ /dev/null @@ -1,124 +0,0 @@ -import { shallowMount } from '@vue/test-utils'; -import { GlToggle } from '@gitlab/ui'; -import VueApollo from 'vue-apollo'; -import Vue from 'vue'; -import SetContainerScanningForRegistry from '~/security_configuration/graphql/set_container_scanning_for_registry.graphql'; -import ContainerScanningForRegistry from '~/security_configuration/components/container_scanning_for_registry.vue'; -import createMockApollo from 'helpers/mock_apollo_helper'; -import waitForPromises from 'helpers/wait_for_promises'; - -Vue.use(VueApollo); - -const getSetCVSMockResponse = (enabled = true) => ({ - data: { - setContainerScanningForRegistry: { - containerScanningForRegistryEnabled: enabled, - errors: [], - }, - }, -}); - -const defaultProvide = { - containerScanningForRegistryEnabled: true, - projectFullPath: 'project/full/path', -}; - -describe('ContainerScanningForRegistry', () => { - let wrapper; - let apolloProvider; - let requestHandlers; - - const createComponent = (options = {}) => { - requestHandlers = { - setCVSMutationHandler: jest.fn().mockResolvedValue(getSetCVSMockResponse(options.enabled)), - }; - - apolloProvider = createMockApollo([ - [SetContainerScanningForRegistry, requestHandlers.setCVSMutationHandler], - ]); - - wrapper = shallowMount(ContainerScanningForRegistry, { - propsData: { - feature: { - available: true, - configured: true, - }, - }, - provide: { - ...defaultProvide, - }, - apolloProvider, - ...options, - }); - }; - - beforeEach(() => { - createComponent(); - }); - - afterEach(() => { - apolloProvider = null; - }); - - const findToggle = () => wrapper.findComponent(GlToggle); - - it('renders the component', () => { - expect(wrapper.exists()).toBe(true); - }); - - it('renders the toggle component with correct values', () => { - expect(findToggle().exists()).toBe(true); - expect(findToggle().props('value')).toBe(defaultProvide.containerScanningForRegistryEnabled); - }); - - it('should allow toggle when feature is not configured', () => { - createComponent({ - propsData: { - feature: { - available: true, - configured: false, - }, - }, - }); - expect(findToggle().props('disabled')).toBe(false); - }); - - it.each([true, false])( - 'calls mutation on toggle change with correct payload when %s', - async (enabled) => { - createComponent({ enabled }); - - findToggle().vm.$emit('change', enabled); - - 
expect(requestHandlers.setCVSMutationHandler).toHaveBeenCalledWith({ - input: { - namespacePath: 'project/full/path', - enable: enabled, - }, - }); - - await waitForPromises(); - - expect(findToggle().props('value')).toBe(enabled); - }, - ); - - it('emits the overrideStatus event with toggle value', async () => { - const enabled = true; - createComponent({ enabled }); - - findToggle().vm.$emit('change', enabled); - - expect(requestHandlers.setCVSMutationHandler).toHaveBeenCalledWith({ - input: { - namespacePath: 'project/full/path', - enable: enabled, - }, - }); - - await waitForPromises(); - - expect(wrapper.emitted().overrideStatus).toHaveLength(1); - expect(wrapper.emitted().overrideStatus[0][0]).toBe(true); - }); -}); diff --git a/spec/frontend/security_configuration/components/feature_card_spec.js b/spec/frontend/security_configuration/components/feature_card_spec.js index ce2070e2036..02a95d2671e 100644 --- a/spec/frontend/security_configuration/components/feature_card_spec.js +++ b/spec/frontend/security_configuration/components/feature_card_spec.js @@ -1,6 +1,5 @@ import { GlIcon } from '@gitlab/ui'; import { mount } from '@vue/test-utils'; -import Vue, { nextTick } from 'vue'; import { extendedWrapper } from 'helpers/vue_test_utils_helper'; import { securityFeatures } from 'jest/security_configuration/mock_data'; import FeatureCard from '~/security_configuration/components/feature_card.vue'; @@ -14,10 +13,6 @@ import { import { manageViaMRErrorMessage } from '../constants'; import { makeFeature } from './utils'; -const MockComponent = Vue.component('MockComponent', { - render: (createElement) => createElement('span'), -}); - describe('FeatureCard component', () => { let feature; let wrapper; @@ -55,8 +50,6 @@ describe('FeatureCard component', () => { const findSecondarySection = () => wrapper.findByTestId('secondary-feature'); - const findSlotComponent = () => wrapper.findComponent(MockComponent); - const findFeatureStatus = () => wrapper.findByTestId('feature-status'); const expectAction = (action) => { @@ -398,35 +391,4 @@ describe('FeatureCard component', () => { }); }); }); - - describe('when a slot component is passed', () => { - beforeEach(() => { - feature = makeFeature({ - slotComponent: MockComponent, - }); - createComponent({ feature }); - }); - - it('renders the component properly', () => { - expect(wrapper.findComponent(MockComponent).exists()).toBe(true); - }); - }); - - describe('when the overrideStatus event is emitted', () => { - beforeEach(() => { - feature = makeFeature({ - slotComponent: MockComponent, - }); - createComponent({ feature }); - }); - - it('sets the overrideStatus', async () => { - expect(findFeatureStatus().html()).toContain('Available with Ultimate'); - - findSlotComponent().vm.$emit('overrideStatus', true); - await nextTick(); - - expect(findFeatureStatus().html()).toContain('Enabled'); - }); - }); }); diff --git a/spec/frontend/security_configuration/utils_spec.js b/spec/frontend/security_configuration/utils_spec.js index f1e2933d96b..f2eeaca8987 100644 --- a/spec/frontend/security_configuration/utils_spec.js +++ b/spec/frontend/security_configuration/utils_spec.js @@ -1,5 +1,5 @@ import { augmentFeatures, translateScannerNames } from '~/security_configuration/utils'; -import { SCANNER_NAMES_MAP, securityFeatures } from '~/security_configuration/constants'; +import { SCANNER_NAMES_MAP } from '~/security_configuration/constants'; describe('augmentFeatures', () => { const mockSecurityFeatures = [ @@ -12,16 +12,6 @@ describe('augmentFeatures', () 
=> { }, ]; - const mockSecurityFeaturesWithSlot = [ - { - name: 'CONTAINER_REGISTRY', - type: 'CONTAINER_REGISTRY', - security_features: { - type: 'CONTAINER_REGISTRY', - }, - }, - ]; - const expectedMockSecurityFeatures = [ { name: 'SAST', @@ -32,16 +22,6 @@ describe('augmentFeatures', () => { }, ]; - const expectedMockSecurityWithSlotFeatures = [ - { - name: 'CONTAINER_REGISTRY', - type: 'CONTAINER_REGISTRY', - securityFeatures: { - type: 'CONTAINER_REGISTRY', - }, - }, - ]; - const expectedInvalidMockSecurityFeatures = [ { foo: 'bar', @@ -149,10 +129,6 @@ describe('augmentFeatures', () => { augmentedSecurityFeatures: expectedMockSecurityFeatures, }; - const expectedOutputWithSlot = { - augmentedSecurityFeatures: expectedMockSecurityWithSlotFeatures, - }; - const expectedInvalidOutputDefault = { augmentedSecurityFeatures: expectedInvalidMockSecurityFeatures, }; @@ -196,48 +172,32 @@ describe('augmentFeatures', () => { describe('returns an object with augmentedSecurityFeatures when', () => { it('given an properly formatted array', () => { - expect(augmentFeatures(securityFeatures, mockSecurityFeatures)).toEqual( - expectedOutputDefault, - ); + expect(augmentFeatures(mockSecurityFeatures)).toEqual(expectedOutputDefault); }); it('given an invalid populated array', () => { expect( - augmentFeatures(securityFeatures, [ - { ...mockSecurityFeatures[0], ...mockInvalidCustomFeature[0] }, - ]), + augmentFeatures([{ ...mockSecurityFeatures[0], ...mockInvalidCustomFeature[0] }]), ).toEqual(expectedInvalidOutputDefault); }); it('features have secondary key', () => { expect( - augmentFeatures(securityFeatures, [ - { ...mockSecurityFeatures[0], ...mockFeaturesWithSecondary[0] }, - ]), + augmentFeatures([{ ...mockSecurityFeatures[0], ...mockFeaturesWithSecondary[0] }]), ).toEqual(expectedOutputSecondary); }); it('given a valid populated array', () => { expect( - augmentFeatures(securityFeatures, [ - { ...mockSecurityFeatures[0], ...mockValidCustomFeature[0] }, - ]), + augmentFeatures([{ ...mockSecurityFeatures[0], ...mockValidCustomFeature[0] }]), ).toEqual(expectedOutputCustomFeature); }); - - it('when a custom vue slot is defined', () => { - expect(augmentFeatures(securityFeatures, mockSecurityFeaturesWithSlot)).toEqual( - expectedOutputWithSlot, - ); - }); }); describe('returns an object with camelcased keys', () => { it('given a customfeature in snakecase', () => { expect( - augmentFeatures(securityFeatures, [ - { ...mockSecurityFeatures[0], ...mockValidCustomFeatureSnakeCase[0] }, - ]), + augmentFeatures([{ ...mockSecurityFeatures[0], ...mockValidCustomFeatureSnakeCase[0] }]), ).toEqual(expectedOutputCustomFeature); }); }); @@ -245,7 +205,7 @@ describe('augmentFeatures', () => { describe('follows onDemandAvailable', () => { it('deletes badge when false', () => { expect( - augmentFeatures(securityFeatures, [ + augmentFeatures([ { ...mockSecurityFeaturesDast[0], ...mockValidCustomFeatureWithOnDemandAvailableFalse[0], @@ -256,7 +216,7 @@ describe('augmentFeatures', () => { it('keeps badge when true', () => { expect( - augmentFeatures(securityFeatures, [ + augmentFeatures([ { ...mockSecurityFeaturesDast[0], ...mockValidCustomFeatureWithOnDemandAvailableTrue[0] }, ]), ).toEqual(expectedOutputCustomFeatureWithOnDemandAvailableTrue); diff --git a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb index a0158b039e3..9c2acb67ab6 100644 --- a/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb +++ 
b/spec/lib/gitlab/database/partitioning/list/convert_table_spec.rb @@ -320,6 +320,21 @@ RSpec.describe Gitlab::Database::Partitioning::List::ConvertTable, feature_categ expect(migration_context.has_loose_foreign_key?(parent_table_name)).to be_truthy end end + + context 'when table has FK referencing itself' do + before do + connection.execute(<<~SQL) + ALTER TABLE #{table_name} ADD COLUMN auto_canceled_by_id bigint; + ALTER TABLE #{table_name} ADD CONSTRAINT self_referencing FOREIGN KEY (#{partitioning_column}, auto_canceled_by_id) REFERENCES #{table_name} (#{partitioning_column}, id) ON DELETE SET NULL; + SQL + end + + it 'does not duplicate the FK', :aggregate_failures do + expect { partition }.not_to raise_error + + expect(migration_context.foreign_keys(parent_table_name).map(&:name)).not_to include("self_referencing") + end + end end context 'when a single partitioning value is given' do diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb index a84a3294465..ee921bf175f 100644 --- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb @@ -1180,17 +1180,19 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source let(:force) { false } let(:start_sha) { nil } let(:sign) { true } + let(:target_sha) { nil } subject do client.user_commit_files( user, 'my-branch', 'Commit files message', [], 'janedoe@example.com', 'Jane Doe', - 'master', repository, force, start_sha, sign) + 'master', repository, force, start_sha, sign, target_sha) end context 'when UserCommitFiles RPC is called' do let(:force) { true } let(:start_sha) { project.commit.id } let(:sign) { false } + let(:target_sha) { 'target_sha' } it 'successfully builds the header' do expect_any_instance_of(Gitaly::OperationService::Stub).to receive(:user_commit_files) do |_, req_enum| @@ -1199,6 +1201,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source expect(header.force).to eq(force) expect(header.start_sha).to eq(start_sha) expect(header.sign).to eq(sign) + expect(header.expected_old_oid).to eq(target_sha) end.and_return(Gitaly::UserCommitFilesResponse.new) subject @@ -1441,6 +1444,32 @@ RSpec.describe Gitlab::GitalyClient::OperationService, feature_category: :source end end end + + context 'with an invalid target_sha' do + context 'when the target_sha is not in a valid format' do + let(:target_sha) { 'asdf' } + + it 'raises CommandError' do + expect { subject }.to raise_error(Gitlab::Git::CommandError) + end + end + + context 'when the target_sha is valid but not present in the repo' do + let(:target_sha) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff0' } + + it 'raises CommandError' do + expect { subject }.to raise_error(Gitlab::Git::CommandError) + end + end + + context 'when the target_sha is present in the repo but is not the latest' do + let(:target_sha) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' } + + it 'raises FailedPrecondition' do + expect { subject }.to raise_error(GRPC::FailedPrecondition) + end + end + end end end diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb index 014bd83d17d..2b32f709b21 100644 --- a/spec/models/ci/stage_spec.rb +++ b/spec/models/ci/stage_spec.rb @@ -128,6 +128,54 @@ RSpec.describe Ci::Stage, :models, feature_category: :continuous_integration do end end + describe 'ordered statuses in stage' do + let_it_be(:stage) { create(:ci_stage, pipeline: pipeline, name: 'test') } + + describe 
'#ordered_latest_statuses' do + context 'with expected job order' do + let_it_be(:bridge_job) { create(:ci_bridge, :success, ci_stage: stage, name: 'bridge_job') } + let_it_be(:another_job) { create(:ci_build, ci_stage: stage, status: :manual, name: 'another_job') } + let_it_be(:job) { create(:ci_build, :success, ci_stage: stage, name: 'job') } + let_it_be(:another_job_2) { create(:ci_build, ci_stage: stage, status: :skipped, name: 'another_job_2') } + + it 'returns the stage with jobs in the correct order' do + job_names = stage.ordered_latest_statuses + expected_order = [another_job, bridge_job, job, another_job_2] + + expect(job_names).to eq(expected_order) + end + end + end + + describe '#ordered_retried_statuses' do + let_it_be(:retried_job_1) do + create(:ci_build, :retried, ci_stage: stage, status: :success, name: 'retried_job_1') + end + + let_it_be(:retried_job_2) { create(:ci_build, :retried, ci_stage: stage, status: :failed, name: 'retried_job_2') } + let_it_be(:retried_job_3) do + create(:ci_build, :retried, ci_stage: stage, status: :canceled, name: 'retried_job_3') + end + + let_it_be(:retried_job_4) do + create(:ci_build, :retried, ci_stage: stage, status: :running, name: 'retried_job_4') + end + + let_it_be(:retried_job_5) do + create(:ci_build, :retried, ci_stage: stage, status: :pending, name: 'retried_job_5') + end + + it 'returns retried statuses in the correct order based on ORDERED_STATUSES' do + ordered_statuses = stage.ordered_retried_statuses + + expected_order = %w[failed pending running canceled success] + expect(ordered_statuses.map(&:status)).to eq(expected_order) + expect(ordered_statuses.map(&:name)).to eq(%w[retried_job_2 retried_job_5 retried_job_4 retried_job_3 + retried_job_1]) + end + end + end + describe '#update_status' do context 'when stage objects needs to be updated' do before do diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb index 4bb9a20c68a..3d05524aac4 100644 --- a/spec/models/commit_spec.rb +++ b/spec/models/commit_spec.rb @@ -17,6 +17,7 @@ RSpec.describe Commit do it { is_expected.to include_module(Referable) } it { is_expected.to include_module(StaticModel) } it { is_expected.to include_module(Presentable) } + it { is_expected.to include_module(GlobalID::Identification) } end describe '.lazy' do diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb index 58ed4a56a00..bc912f185a2 100644 --- a/spec/models/repository_spec.rb +++ b/spec/models/repository_spec.rb @@ -4307,19 +4307,16 @@ RSpec.describe Repository, feature_category: :source_code_management do end describe '#commit_files' do - let(:project) { create(:project, :empty_repo) } - - it 'calls UserCommitFiles RPC' do - expect_next_instance_of(Gitlab::GitalyClient::OperationService) do |client| - expect(client).to receive(:user_commit_files).with( - user, 'extra-branch', 'commit message', [], - 'author email', 'author name', nil, nil, true, nil, false - ) - end + let_it_be(:project) { create(:project, :repository) } + let(:target_sha) { repository.commit('master').sha } + let(:expected_params) do + [user, 'master', 'commit message', [], 'author email', 'author name', nil, nil, true, nil, false, target_sha] + end + subject do repository.commit_files( user, - branch_name: 'extra-branch', + branch_name: 'master', message: 'commit message', author_name: 'author name', author_email: 'author email', @@ -4328,5 +4325,44 @@ RSpec.describe Repository, feature_category: :source_code_management do sign: false ) end + + it 'finds and passes the branches 
target_sha' do + expect_next_instance_of(Gitlab::GitalyClient::OperationService) do |client| + expect(client).to receive(:user_commit_files).with(*expected_params) + end + + subject + end + + context 'when validate_target_sha_in_user_commit_files feature flag is disabled' do + let_it_be(:project) { create(:project, :repository) } + let(:target_sha) { nil } + + before do + stub_feature_flags(validate_target_sha_in_user_commit_files: false) + end + + it 'does not find or pass the branches target_sha' do + expect_next_instance_of(Gitlab::GitalyClient::OperationService) do |client| + expect(client).to receive(:user_commit_files).with(*expected_params) + end + expect(repository).not_to receive(:commit) + + subject + end + end + + context 'with an empty branch' do + let_it_be(:project) { create(:project, :empty_repo) } + let(:target_sha) { nil } + + it 'calls UserCommitFiles RPC' do + expect_next_instance_of(Gitlab::GitalyClient::OperationService) do |client| + expect(client).to receive(:user_commit_files).with(*expected_params) + end + + subject + end + end end end diff --git a/spec/requests/api/graphql/query_spec.rb b/spec/requests/api/graphql/query_spec.rb index 0602cfec149..79d76bffb10 100644 --- a/spec/requests/api/graphql/query_spec.rb +++ b/spec/requests/api/graphql/query_spec.rb @@ -147,5 +147,50 @@ RSpec.describe 'Query', feature_category: :shared do expect(graphql_data['ciPipelineStage']).to be_nil end end + + context 'with expected job order' do + let_it_be(:pipeline) { create(:ci_pipeline, project: project) } + let_it_be(:stage) { create(:ci_stage, pipeline: pipeline, name: 'test') } + let_it_be(:bridge_job) { create(:ci_bridge, :success, ci_stage: stage, name: 'bridge_job') } + let_it_be(:another_job) { create(:ci_build, ci_stage: stage, status: :manual, name: 'another_job') } + let_it_be(:job) { create(:ci_build, :success, ci_stage: stage, name: 'job') } + let_it_be(:another_job_2) { create(:ci_build, ci_stage: stage, status: :skipped, name: 'another_job_2') } + + let(:query) do + <<~GRAPHQL + { + ciPipelineStage(id: "#{stage.to_global_id}") { + id + name + jobs { + nodes { + name + } + } + } + } + GRAPHQL + end + + context 'when the current user has access to the stage' do + before do + project.add_developer(current_user) + end + + it 'returns the stage with jobs in the correct order' do + post_graphql(query, current_user: current_user) + + expect(response).to have_gitlab_http_status(:ok) + + stage_data = graphql_data['ciPipelineStage'] + expect(stage_data['name']).to eq('test') + + job_names = stage_data['jobs']['nodes'].pluck('name') + expected_order = %w[another_job bridge_job job another_job_2] + + expect(job_names).to eq(expected_order) + end + end + end end end diff --git a/spec/requests/api/usage_data_spec.rb b/spec/requests/api/usage_data_spec.rb index 3151589f199..d7f2ad3b4c7 100644 --- a/spec/requests/api/usage_data_spec.rb +++ b/spec/requests/api/usage_data_spec.rb @@ -202,6 +202,26 @@ RSpec.describe API::UsageData, feature_category: :service_ping do end end + context 'with oauth token that has ai_workflows scope' do + let(:oauth_access_token) { create(:oauth_access_token, user: user, scopes: [:ai_workflows]) } + let(:params) { { event: known_event } } + + it 'allows access' do + expect(Gitlab::InternalEvents).to receive(:track_event) + .with( + known_event, + send_snowplow_event: false, + user: user, + namespace: nil, + project: nil, + additional_properties: {} + ) + post api(endpoint, oauth_access_token: oauth_access_token), params: params + + expect(response).to 
have_gitlab_http_status(:ok) + end + end + context 'with usage ping enabled' do let_it_be(:namespace) { create(:namespace) } let_it_be(:project) { create(:project) } diff --git a/spec/requests/projects/pipelines_controller_spec.rb b/spec/requests/projects/pipelines_controller_spec.rb index 8be4fecea04..268c016a1a1 100644 --- a/spec/requests/projects/pipelines_controller_spec.rb +++ b/spec/requests/projects/pipelines_controller_spec.rb @@ -145,6 +145,24 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte expect(response).to have_gitlab_http_status(:ok) end + + it 'returns retried builds in the correct order' do + create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build', name: 'retried_job_1') + create(:ci_build, :retried, :success, pipeline: pipeline, stage: 'build', name: 'retried_job_2') + create(:ci_build, :retried, :running, pipeline: pipeline, stage: 'build', name: 'retried_job_3') + create(:ci_build, :retried, :canceled, pipeline: pipeline, stage: 'build', name: 'retried_job_4') + create(:ci_build, :retried, :pending, pipeline: pipeline, stage: 'build', name: 'retried_job_5') + + request_build_stage(retried: true) + + expect(response).to have_gitlab_http_status(:ok) + + retried_jobs = json_response['retried'] + job_names = retried_jobs.pluck('name') + expected_order = %w[retried_job_1 retried_job_5 retried_job_3 retried_job_4 retried_job_2] + + expect(job_names).to eq(expected_order) + end end def request_build_stage(params = {}) diff --git a/spec/serializers/stage_entity_spec.rb b/spec/serializers/stage_entity_spec.rb index b3f08f2aee3..7adb5292ae0 100644 --- a/spec/serializers/stage_entity_spec.rb +++ b/spec/serializers/stage_entity_spec.rb @@ -126,5 +126,39 @@ RSpec.describe StageEntity, feature_category: :continuous_integration do expect { serialize(stage) }.not_to exceed_query_limit(control) end end + + context 'when details: true and retried: true' do + let(:pipeline) { create(:ci_pipeline) } + let(:stage) { create(:ci_stage, pipeline: pipeline, status: :success) } + let(:entity) { described_class.new(stage, request: request, details: true, retried: true) } + + before do + create(:ci_build, :success, pipeline: pipeline, stage_id: stage.id, name: 'latest_job') + create(:ci_build, :retried, pipeline: pipeline, stage_id: stage.id, name: 'retried_job') + create(:ci_build, :failed, pipeline: pipeline, stage_id: stage.id, name: 'failed_job') + end + + it 'exposes latest_statuses and retried' do + result = entity.as_json + + expect(result).to include(:latest_statuses, :retried) + expect(result[:latest_statuses].map { |job| job[:name] }).to include('failed_job', 'latest_job') + expect(result[:retried].map { |job| job[:name] }).to eq(['retried_job']) + end + + it 'does not expose latest_statuses when details is false' do + result = described_class.new(stage, request: request, retried: true).as_json + + expect(result).not_to include(:latest_statuses) + expect(result).to include(:retried) + end + + it 'does not expose retried when retried is false' do + result = described_class.new(stage, request: request, details: true).as_json + + expect(result).to include(:latest_statuses) + expect(result).not_to include(:retried) + end + end end end diff --git a/spec/services/ci/partitions/setup_default_service_spec.rb b/spec/services/ci/partitions/setup_default_service_spec.rb index b864a216f06..013d772688a 100644 --- a/spec/services/ci/partitions/setup_default_service_spec.rb +++ b/spec/services/ci/partitions/setup_default_service_spec.rb @@ -12,16 +12,6 
@@ RSpec.describe Ci::Partitions::SetupDefaultService, feature_category: :ci_scalin describe '.execute' do subject(:execute) { service.execute } - context 'when ci_partitioning_first_records is disabled' do - before do - stub_feature_flags(ci_partitioning_first_records: false) - end - - it 'does not create the default ci_partitions' do - expect { execute }.not_to change { Ci::Partition } - end - end - context 'when current ci_partition exists' do let!(:current_partition) { create(:ci_partition, :current) } diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb index bbde629e345..c8ea024282c 100644 --- a/spec/services/design_management/save_designs_service_spec.rb +++ b/spec/services/design_management/save_designs_service_spec.rb @@ -366,7 +366,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m expect { service.execute } .to change { issue.designs.count }.from(0).to(2) .and change { DesignManagement::Version.count }.by(1) - .and change { Gitlab::GitalyClient.get_request_count }.by(3) + .and change { Gitlab::GitalyClient.get_request_count }.by(4) .and change { commit_count }.by(1) .and trigger_internal_events(Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_ADDED) .twice.with(user: user, project: project, category: 'InternalEventTracking') diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb index d7546ac08f6..77df0b5921d 100644 --- a/spec/services/repositories/changelog_service_spec.rb +++ b/spec/services/repositories/changelog_service_spec.rb @@ -265,6 +265,8 @@ RSpec.describe Repositories::ChangelogService, feature_category: :source_code_ma end def create_commit(project, user, params) + RequestStore.clear! + params = { start_branch: 'master', branch_name: 'master' }.merge(params) Files::MultiService.new(project, user, params).execute.fetch(:result) end diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb index 8c288e94f73..5254a318221 100644 --- a/spec/services/snippets/create_service_spec.rb +++ b/spec/services/snippets/create_service_spec.rb @@ -118,6 +118,14 @@ RSpec.describe Snippets::CreateService, feature_category: :source_code_managemen expect(blob.data).to eq base_opts[:content] end + it 'passes along correct commit attributes' do + expect_next_instance_of(Repository) do |repository| + expect(repository).to receive(:commit_files).with(anything, a_hash_including(skip_target_sha: true)) + end + + subject + end + context 'when repository creation action fails' do before do allow_next_instance_of(Snippet) do |instance| diff --git a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb index 25a72d46a7b..31811527259 100644 --- a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb +++ b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb @@ -154,7 +154,7 @@ RSpec.shared_examples_for 'services security ci configuration create service' do let_it_be(:repository) { project.repository } it 'is successful' do - expect(repository).to receive(:commit).and_return(nil) + expect(repository).to receive(:commit).twice.and_return(nil) expect(result.status).to eq(:success) end end
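The retried-job ordering the stage, GraphQL, and pipelines controller specs assert (failed before pending, running, canceled, and success) is what falls out of sorting by a status precedence list. A minimal sketch of that idea; the precedence array here is abbreviated and illustrative, not a copy of the CI ORDERED_STATUSES constant.

```ruby
# Sketch of precedence-based status ordering, using an abbreviated list.
ORDERED_STATUSES = %w[failed pending running manual canceled success skipped].freeze

Job = Struct.new(:name, :status)

retried = [
  Job.new('retried_job_1', 'success'),
  Job.new('retried_job_2', 'failed'),
  Job.new('retried_job_3', 'canceled'),
  Job.new('retried_job_4', 'running'),
  Job.new('retried_job_5', 'pending')
]

ordered = retried.sort_by { |job| ORDERED_STATUSES.index(job.status) || ORDERED_STATUSES.size }

ordered.map(&:name)
# => ["retried_job_2", "retried_job_5", "retried_job_4", "retried_job_3", "retried_job_1"]
```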