Add latest changes from gitlab-org/gitlab@master

parent 23d951df2d
commit 578fc86533
@@ -75,3 +75,16 @@ ui-docs-links lint:
   needs: []
   script:
     - bundle exec haml-lint -i DocumentationLinks
+
+deprecations-doc check:
+  variables:
+    SETUP_DB: "false"
+  extends:
+    - .default-retry
+    - .rails-cache
+    - .default-before_script
+    - .docs:rules:deprecations
+  stage: test
+  needs: []
+  script:
+    - bundle exec rake gitlab:docs:check_deprecations
@@ -147,6 +147,13 @@
   - ".markdownlint.yml"
   - "scripts/lint-doc.sh"
 
+.docs-deprecations-patterns: &docs-deprecations-patterns
+  - "doc/deprecations/index.md"
+  - "data/deprecations/*.yml"
+  - "data/deprecations/templates/_deprecation_template.md.erb"
+  - "lib/tasks/gitlab/docs/compile_deprecations.rake"
+  - "tooling/deprecations/docs.rb"
+
 .bundler-patterns: &bundler-patterns
   - '{Gemfile.lock,*/Gemfile.lock,*/*/Gemfile.lock}'
 
@@ -453,6 +460,12 @@
       changes: *docs-patterns
       when: on_success
 
+.docs:rules:deprecations:
+  rules:
+    - <<: *if-default-refs
+      changes: *docs-deprecations-patterns
+      when: on_success
+
 ##################
 # GraphQL rules #
 ##################
@@ -308,6 +308,7 @@ module Ci
     scope :ci_and_parent_sources, -> { where(source: Enums::Ci::Pipeline.ci_and_parent_sources.values) }
     scope :for_user, -> (user) { where(user: user) }
     scope :for_sha, -> (sha) { where(sha: sha) }
+    scope :where_not_sha, -> (sha) { where.not(sha: sha) }
     scope :for_source_sha, -> (source_sha) { where(source_sha: source_sha) }
     scope :for_sha_or_source_sha, -> (sha) { for_sha(sha).or(for_source_sha(sha)) }
     scope :for_ref, -> (ref) { where(ref: ref) }
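
The new `where_not_sha` scope is used by the pipeline auto-cancellation change later in this commit, and because `where.not(sha: ...)` accepts a single value or an array it can exclude several SHAs at once (the model spec added below covers both forms). A minimal usage sketch, assuming a Rails console with an existing `project` and `ref` in hand:

```ruby
# Hypothetical usage of the scope added above; `project` and `ref` are assumptions.
ref = 'main'
head_sha = project.commit(ref).try(:id)

# Pipelines on the ref, excluding the one(s) for the current HEAD commit.
project.all_pipelines.for_ref(ref).where_not_sha(head_sha)

# An array also works, mirroring the spec: exclude several SHAs at once.
project.all_pipelines.where_not_sha(%w[abc abcx])
```
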
@@ -4,6 +4,9 @@ module Ci
   module Sources
     class Pipeline < Ci::ApplicationRecord
       include Ci::NamespacedModelName
+      include IgnorableColumns
+
+      ignore_columns 'source_job_id_convert_to_bigint', remove_with: '14.5', remove_after: '2021-11-22'
 
       self.table_name = "ci_sources_pipelines"
 
@@ -52,8 +52,7 @@ module FeatureFlags
         scopes = strategy.scopes
                    .map { |scope| %Q("#{scope.environment_scope}") }
                    .join(', ')
-        %Q(Created strategy \"#{strategy.name}\" )\
-          "with scopes #{scopes}."
+        %Q(Created strategy "#{strategy.name}" with scopes #{scopes}.)
       end
 
       def feature_flag_by_name
@@ -2,10 +2,9 @@
 stage: none
 group: none
 info: "See the Technical Writers assigned to Development Guidelines: https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments-to-development-guidelines"
-description: "View features that are currently deprecated"
 ---
 
-# Deprecated features by planned removal milestone
+# Deprecated feature removal schedule
 
 <!--
 This page is automatically generated from the YAML files in `/data/deprecations` by the rake task
@@ -16,15 +15,16 @@ Do not edit this page directly.
 To add a deprecation, use the example.yml file in `/data/deprecations/templates` as a template,
 then run `bin/rake gitlab:docs:compile_deprecations`.
 -->
+<% if milestones.any? %>
+<%- milestones.each do |milestone| %>
+## <%= milestone %>
+<%- deprecations.select{|d| d["removal_milestone"] == milestone}.each do |deprecation| %>
+### <%= deprecation["name"]%>
 
-<% if milestones.any? -%>
-<% milestones.each do |milestone| %>
-### <%= milestone %>
-<% deprecations.select{|d| d["removal_milestone"] == milestone}.each do |deprecation| %>
-#### <%= deprecation["name"] %>
-<%= deprecation["body"]%>
-<% end %>
-<% end %>
-<% else -%>
-## There are no deprecated features for this version of GitLab
+<%= deprecation["body"] -%>
+<%- end -%>
+<%- end -%>
+<%- else -%>
+
+Deprecated features scheduled for removal will be listed here, sorted by GitLab milestone.
 <% end -%>
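
For context, this template is rendered with `ERB#result_with_hash`, receiving `deprecations` (an array of hashes loaded from the YAML files) and `milestones` (the unique `removal_milestone` values); the `tooling/deprecations/docs.rb` module added later in this commit does exactly that. A stripped-down sketch of the rendering flow, with inline sample data standing in for the real YAML files and assuming it runs from the repository root:

```ruby
require 'erb'

# Sample data standing in for the YAML files under data/deprecations/.
deprecations = [
  { "name" => "Legacy feature", "removal_milestone" => "15.0", "body" => "Use the new feature instead.\n" }
]
milestones = deprecations.map { |d| d["removal_milestone"] }.uniq

# trim_mode: '-' enables the `<%- ... -%>` trimming syntax used in the template.
template = ERB.new(File.read("data/deprecations/templates/_deprecation_template.md.erb"), trim_mode: '-')

puts template.result_with_hash(deprecations: deprecations, milestones: milestones)
```
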
@@ -10,8 +10,8 @@
 #
 # Please delete this line and above before submitting your merge request.
 
-- name: # The name of the feature to be deprecated
-  removal_milestone: # XX.YY format - the milestone when this feature is planned to be removed
+- name: "Feature name" # The name of the feature to be deprecated
+  removal_milestone: "XX.YY" # the milestone when this feature is planned to be removed
   body: | # Do not modify this line, instead modify the lines below.
     <!-- START OF BODY COMMENT
 
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+class RemoveDuplicateDastSiteTokensWithSameToken < ActiveRecord::Migration[6.1]
+  include Gitlab::Database::MigrationHelpers
+
+  INDEX_NAME = 'index_dast_site_token_on_token'
+
+  # rubocop: disable Migration/AddIndex
+  def up
+    execute("WITH duplicate_tokens AS(
+      SELECT id, rank() OVER (PARTITION BY token ORDER BY id) r FROM dast_site_tokens
+    )
+    DELETE FROM dast_site_tokens c USING duplicate_tokens t
+    WHERE c.id = t.id AND t.r > 1;")
+
+    add_index :dast_site_tokens, :token, name: INDEX_NAME, unique: true
+  end
+
+  # rubocop: disable Migration/RemoveIndex
+  def down
+    remove_index :dast_site_tokens, :token, name: INDEX_NAME
+  end
+end
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class RescheduleExtractProjectTopicsIntoSeparateTable < ActiveRecord::Migration[6.0]
+  include Gitlab::Database::MigrationHelpers
+
+  MIGRATION = 'ExtractProjectTopicsIntoSeparateTable'
+  DELAY_INTERVAL = 4.minutes
+
+  disable_ddl_transaction!
+
+  def up
+    requeue_background_migration_jobs_by_range_at_intervals(MIGRATION, DELAY_INTERVAL)
+  end
+
+  def down
+    # no-op
+  end
+end
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+class DropTemporaryTriggerForCiSourcesPipelines < Gitlab::Database::Migration[1.0]
+  disable_ddl_transaction!
+
+  TABLE = 'ci_sources_pipelines'
+  TEMPORARY_COLUMN = 'source_job_id_convert_to_bigint'
+  MAIN_COLUMN = 'source_job_id'
+  TRIGGER = 'trigger_8485e97c00e3'
+
+  # rubocop:disable Migration/WithLockRetriesDisallowedMethod
+  def up
+    check_trigger_permissions!(TABLE)
+
+    with_lock_retries do
+      remove_rename_triggers(TABLE, TRIGGER)
+    end
+  end
+
+  def down
+    check_trigger_permissions!(TABLE)
+
+    with_lock_retries do
+      install_rename_triggers(TABLE, MAIN_COLUMN, TEMPORARY_COLUMN, trigger_name: TRIGGER)
+    end
+  end
+  # rubocop:enable Migration/WithLockRetriesDisallowedMethod
+end
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+class SliceMergeRequestDiffCommitMigrations < ActiveRecord::Migration[6.1]
+  include Gitlab::Database::MigrationHelpers
+
+  BATCH_SIZE = 5_000
+  MIGRATION_CLASS = 'MigrateMergeRequestDiffCommitUsers'
+  STEAL_MIGRATION_CLASS = 'StealMigrateMergeRequestDiffCommitUsers'
+
+  def up
+    old_jobs = Gitlab::Database::BackgroundMigrationJob
+      .for_migration_class(MIGRATION_CLASS)
+      .pending
+      .to_a
+
+    return if old_jobs.empty?
+
+    # This ensures we stop processing the old ranges, as the background
+    # migrations skip already processed jobs.
+    Gitlab::Database::BackgroundMigrationJob
+      .for_migration_class(MIGRATION_CLASS)
+      .pending
+      .update_all(status: :succeeded)
+
+    rows = []
+
+    old_jobs.each do |job|
+      min, max = job.arguments
+
+      while min < max
+        rows << {
+          class_name: MIGRATION_CLASS,
+          arguments: [min, min + BATCH_SIZE],
+          created_at: Time.now.utc,
+          updated_at: Time.now.utc
+        }
+
+        min += BATCH_SIZE
+      end
+    end
+
+    Gitlab::Database::BackgroundMigrationJob.insert_all!(rows)
+
+    job = Gitlab::Database::BackgroundMigrationJob
+      .for_migration_class(MIGRATION_CLASS)
+      .pending
+      .first
+
+    migrate_in(1.hour, STEAL_MIGRATION_CLASS, job.arguments)
+  end
+
+  def down
+    # no-op
+  end
+end
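
The slicing loop in the migration above turns each pending `[min, max]` job into consecutive `BATCH_SIZE`-wide sub-ranges, which is what the accompanying spec later in this commit asserts (for example, `[1, 10_001]` and `[10_001, 20_001]` become `[1, 5_001]`, `[5_001, 10_001]`, `[10_001, 15_001]`, `[15_001, 20_001]`). A self-contained sketch of just that arithmetic:

```ruby
BATCH_SIZE = 5_000

# Slice a single [min, max] argument pair into BATCH_SIZE-wide sub-ranges,
# mirroring the `while min < max` loop in the migration above.
def slice_range(min, max, batch_size = BATCH_SIZE)
  slices = []

  while min < max
    slices << [min, min + batch_size]
    min += batch_size
  end

  slices
end

p slice_range(1, 10_001)      # => [[1, 5001], [5001, 10001]]
p slice_range(10_001, 20_001) # => [[10001, 15001], [15001, 20001]]
```
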
@@ -0,0 +1 @@
+a7f4911fcb9ab939a6e5e9a6e5e927fd6828ff062324d8483d78c8f8a4ded4e6
@@ -0,0 +1 @@
+84a68304f95ae04b85625c214b28a251014582fb142390ff3df8ea6d6f0947e1
@@ -0,0 +1 @@
+36572ad3a4a8a7511512ff45e2a68252950ce852af1b5a28c6b4e4491f97be07
@@ -0,0 +1 @@
+34287b86616026b94374856991c793ad869c52badddc09be923984002c6214bd
@@ -107,15 +107,6 @@ BEGIN
 END;
 $$;
 
-CREATE FUNCTION trigger_8485e97c00e3() RETURNS trigger
-    LANGUAGE plpgsql
-    AS $$
-BEGIN
-  NEW."source_job_id_convert_to_bigint" := NEW."source_job_id";
-  RETURN NEW;
-END;
-$$;
-
 CREATE FUNCTION trigger_8487d4de3e7b() RETURNS trigger
     LANGUAGE plpgsql
     AS $$
@@ -24813,6 +24804,8 @@ CREATE UNIQUE INDEX index_dast_site_profiles_pipelines_on_ci_pipeline_id ON dast
 
 CREATE UNIQUE INDEX index_dast_site_token_on_project_id_and_url ON dast_site_tokens USING btree (project_id, url);
 
+CREATE UNIQUE INDEX index_dast_site_token_on_token ON dast_site_tokens USING btree (token);
+
 CREATE INDEX index_dast_site_tokens_on_project_id ON dast_site_tokens USING btree (project_id);
 
 CREATE INDEX index_dast_site_validations_on_dast_site_token_id ON dast_site_validations USING btree (dast_site_token_id);
@@ -27327,8 +27320,6 @@ CREATE TRIGGER trigger_542d6c2ad72e BEFORE INSERT OR UPDATE ON ci_builds_metadat
 
 CREATE TRIGGER trigger_77f5e1d20482 BEFORE INSERT OR UPDATE ON deployments FOR EACH ROW EXECUTE FUNCTION trigger_77f5e1d20482();
 
-CREATE TRIGGER trigger_8485e97c00e3 BEFORE INSERT OR UPDATE ON ci_sources_pipelines FOR EACH ROW EXECUTE FUNCTION trigger_8485e97c00e3();
-
 CREATE TRIGGER trigger_8487d4de3e7b BEFORE INSERT OR UPDATE ON ci_builds_metadata FOR EACH ROW EXECUTE FUNCTION trigger_8487d4de3e7b();
 
 CREATE TRIGGER trigger_91dc388a5fe6 BEFORE INSERT OR UPDATE ON dep_ci_build_trace_sections FOR EACH ROW EXECUTE FUNCTION trigger_91dc388a5fe6();
@@ -340,6 +340,12 @@ disable enforcement. For more information, see the documentation on configuring
 1. Run `sudo -u git /home/git/gitaly/gitaly-hooks check /home/git/gitaly/config.toml`
    to confirm that Gitaly can perform callbacks to the GitLab internal API.
 
+WARNING:
+If directly copying repository data from a GitLab server to Gitaly, ensure that the metadata file,
+default path `/var/opt/gitlab/git-data/repositories/.gitaly-metadata`, is not included in the transfer.
+Copying this file causes GitLab to use the [Rugged patches](index.md#direct-access-to-git-in-gitlab) for repositories hosted on the Gitaly server,
+leading to `Error creating pipeline` and `Commit not found` errors, or stale data.
+
 ### Configure Gitaly clients
 
 As the final step, you must update Gitaly clients to switch from using local Gitaly service to use
@@ -539,12 +539,6 @@ To see if GitLab can access the repository file system directly, we use the foll
 Direct Git access is enable by default in Omnibus GitLab because it fills in the correct repository
 paths in the GitLab configuration file `config/gitlab.yml`. This satisfies the UUID check.
 
-WARNING:
-If directly copying repository data from a GitLab server to Gitaly, ensure that the metadata file,
-default path `/var/opt/gitlab/git-data/repositories/.gitaly-metadata`, is not included in the transfer.
-Copying this file causes GitLab to use the Rugged patches for repositories hosted on the Gitaly server,
-leading to `Error creating pipeline` and `Commit not found` errors, or stale data.
-
 ### Transition to Gitaly Cluster
 
 For the sake of removing complexity, we must remove direct Git access in GitLab. However, we can't
@@ -443,6 +443,15 @@ When possible, try to avoid acronyms in headings.
 - List item 2
 ```
 
+### Comments
+
+To embed comments within Markdown, use standard HTML comments that are not rendered
+when published. Example:
+
+```html
+<!-- This is a comment that is not rendered -->
+```
+
 ### Emphasis
 
 - Use double asterisks (`**`) to mark a word or text in bold (`**bold**`).
@@ -16,9 +16,16 @@ It combines [feature-driven development](https://en.wikipedia.org/wiki/Feature-d
 
 Organizations coming to Git from other version control systems frequently find it hard to develop a productive workflow.
 This article describes GitLab flow, which integrates the Git workflow with an issue tracking system.
-It offers a transparent and effective way to work with Git.
+It offers a transparent and effective way to work with Git:
 
-
+```mermaid
+graph LR
+  subgraph Git workflow
+    A[Working copy] --> |git add| B[Index]
+    B --> |git commit| C[Local repository]
+    C --> |git push| D[Remote repository]
+  end
+```
 
 When converting to Git, you have to get used to the fact that it takes three steps to share a commit with colleagues.
 Most version control systems have only one step: committing from the working copy to a shared server.

Binary image file deleted; not shown. (Before: 7.0 KiB)
@@ -2,10 +2,9 @@
 stage: none
 group: none
 info: "See the Technical Writers assigned to Development Guidelines: https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments-to-development-guidelines"
-description: "View features that are currently deprecated"
 ---
 
-# Deprecated features by planned removal milestone
+# Deprecated feature removal schedule
 
 <!--
 This page is automatically generated from the YAML files in `/data/deprecations` by the rake task

@@ -17,4 +16,4 @@ To add a deprecation, use the example.yml file in `/data/deprecations/templates`
 then run `bin/rake gitlab:docs:compile_deprecations`.
 -->
 
-## There are no deprecated features for this version of GitLab
+Deprecated features scheduled for removal will be listed here, sorted by GitLab milestone.
@@ -26,12 +26,21 @@ module Gitlab
         belongs_to :topic
       end
 
+      # Temporary AR table for projects
+      class Project < ActiveRecord::Base
+        self.table_name = 'projects'
+      end
+
       def perform(start_id, stop_id)
         Tagging.includes(:tag).where(taggable_type: 'Project', id: start_id..stop_id).each do |tagging|
+          if Project.exists?(id: tagging.taggable_id)
             topic = Topic.find_or_create_by(name: tagging.tag.name)
             project_topic = ProjectTopic.find_or_create_by(project_id: tagging.taggable_id, topic: topic)
 
             tagging.delete if project_topic.persisted?
+          else
+            tagging.delete
+          end
         end
 
         mark_job_as_succeeded(start_id, stop_id)
@@ -14,7 +14,7 @@ module Gitlab
 
       # The number of rows in merge_request_diff_commits to get in a single
       # query.
-      COMMIT_ROWS_PER_QUERY = 10_000
+      COMMIT_ROWS_PER_QUERY = 1_000
 
       # The number of rows in merge_request_diff_commits to update in a single
       # query.
@@ -15,13 +15,10 @@ module Gitlab
       end
 
       def schedule_next_job
-        # We process jobs in reverse order, so that (hopefully) we are less
-        # likely to process jobs that the regular background migration job is
-        # also processing.
         next_job = Database::BackgroundMigrationJob
           .for_migration_class('MigrateMergeRequestDiffCommitUsers')
           .pending
-          .last
+          .first
 
         return unless next_job
 
@@ -7,15 +7,19 @@ module Gitlab
         class CancelPendingPipelines < Chain::Base
           include Chain::Helpers
 
+          BATCH_SIZE = 25
+
+          # rubocop: disable CodeReuse/ActiveRecord
           def perform!
             return unless project.auto_cancel_pending_pipelines?
 
             Gitlab::OptimisticLocking.retry_lock(auto_cancelable_pipelines, name: 'cancel_pending_pipelines') do |cancelables|
-              cancelables.find_each do |cancelable|
-                cancelable.auto_cancel_running(pipeline)
+              cancelables.select(:id).each_batch(of: BATCH_SIZE) do |cancelables_batch|
+                auto_cancel_interruptible_pipelines(cancelables_batch.ids)
               end
             end
           end
+          # rubocop: enable CodeReuse/ActiveRecord
 
           def break?
             false

@@ -23,16 +27,21 @@ module Gitlab
 
           private
 
-          # rubocop: disable CodeReuse/ActiveRecord
           def auto_cancelable_pipelines
-            project.all_pipelines.ci_and_parent_sources
-              .where(ref: pipeline.ref)
-              .where.not(id: pipeline.same_family_pipeline_ids)
-              .where.not(sha: project.commit(pipeline.ref).try(:id))
+            project.all_pipelines.created_after(1.week.ago)
+              .ci_and_parent_sources
+              .for_ref(pipeline.ref)
+              .id_not_in(pipeline.same_family_pipeline_ids)
+              .where_not_sha(project.commit(pipeline.ref).try(:id))
               .alive_or_scheduled
-              .with_only_interruptible_builds
           end
-          # rubocop: enable CodeReuse/ActiveRecord
+
+          def auto_cancel_interruptible_pipelines(pipeline_ids)
+            ::Ci::Pipeline
+              .id_in(pipeline_ids)
+              .with_only_interruptible_builds
+              .each { |cancelable| cancelable.auto_cancel_running(pipeline) }
+          end
         end
       end
     end
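
The `perform!` change above replaces per-record `find_each` cancellation with ID batches of 25, so the more expensive interruptibility check runs against a bounded set of IDs at a time (the spec added later in this commit asserts that the second, batched query is issued once for a small dataset). A rough sketch of the batching shape, assuming a relation on a model that includes GitLab's `EachBatch` concern, as `Ci::Pipeline` does:

```ruby
BATCH_SIZE = 25

# `cancelables` is assumed to be an ActiveRecord::Relation whose model includes
# GitLab's EachBatch concern; `each_batch` yields sub-relations of at most
# BATCH_SIZE rows, keyed by primary key.
def each_cancelable_id_batch(cancelables)
  cancelables.select(:id).each_batch(of: BATCH_SIZE) do |batch|
    yield batch.ids # e.g. feed into auto_cancel_interruptible_pipelines above
  end
end
```
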
@@ -4,28 +4,26 @@ namespace :gitlab do
   namespace :docs do
     desc "Generate deprecation list from individual files"
     task :compile_deprecations do
-      require_relative '../../../../tooling/deprecations/docs/renderer'
+      require_relative '../../../../tooling/deprecations/docs'
 
-      source_files = Rake::FileList.new("data/deprecations/**/*.yml") do |fl|
-        fl.exclude(/example\.yml/)
-      end
+      File.write(Deprecations::Docs.path, Deprecations::Docs.render)
 
-      deprecations = source_files.map do |file|
-        YAML.load_file(file)
-      end
+      puts "Deprecations compiled to #{Deprecations::Docs.path}"
+    end
 
-      deprecations.sort_by! { |d| -d["removal_milestone"].to_f }
+    desc "Check that the deprecation doc is up to date"
+    task :check_deprecations do
+      require_relative '../../../../tooling/deprecations/docs'
 
-      milestones = deprecations.map { |d| d["removal_milestone"].to_f }.uniq
+      contents = Deprecations::Docs.render
+      doc = File.read(Deprecations::Docs.path)
 
-      contents = Deprecations::Docs::Renderer
-        .render(deprecations: deprecations, milestones: milestones)
-
-      File.write(
-        File.expand_path("doc/deprecations/index.md", "#{__dir__}/../../../.."),
-        contents)
-
-      puts "Deprecations compiled to doc/deprecations/index.md"
+      if doc == contents
+        puts "Deprecations doc is up to date."
+      else
+        format_output('Deprecations doc is outdated! Please update it by running `bundle exec rake gitlab:docs:compile_deprecations`.')
+        abort
+      end
     end
   end
 end
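
With the two tasks in place, `compile_deprecations` regenerates the page and `check_deprecations` fails when the committed page is stale; the `deprecations-doc check` CI job added at the top of this commit runs the latter via `bundle exec rake gitlab:docs:check_deprecations`. A hedged sketch of invoking them programmatically rather than from the shell:

```ruby
# Sketch only: assumes a context where the GitLab Rake tasks are already loaded
# (for example, a script that has required 'rake' and loaded the project's Rakefile).
require 'rake'

Rake::Task['gitlab:docs:compile_deprecations'].invoke # regenerate the doc
Rake::Task['gitlab:docs:check_deprecations'].invoke   # aborts if the doc is stale
```
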
@@ -35,8 +35,10 @@ module QA
         end
 
         def has_number_of_files?(snippet_title, number)
+          retry_until(max_attempts: 5, reload: true, sleep_interval: 1) do # snippet statistics computation can take a few moments
             within_element(:snippet_link, snippet_title: snippet_title) do
-              has_element?(:snippet_file_count_content, snippet_files: number)
+              has_element?(:snippet_file_count_content, snippet_files: number, wait: 5)
+            end
           end
         end
       end
@@ -287,6 +287,17 @@ module QA
         raise "Rebase did not appear to be successful" unless success
       end
 
+      def merge_immediately!
+        merge_moment_dropdown_found = has_element?(:merge_moment_dropdown, wait: 0)
+
+        if merge_moment_dropdown_found
+          click_element(:merge_moment_dropdown)
+          click_element(:merge_immediately_menu_item)
+        else
+          click_element(:merge_button)
+        end
+      end
+
      def try_to_merge!
        # Revisit after merge page re-architect is done https://gitlab.com/gitlab-org/gitlab/-/issues/300042
        # To remove page refresh logic if possible
@@ -58,7 +58,7 @@ module QA
         it_behaves_like 'successful tag creation', :maintainer_user
       end
 
-      context 'when protected' do
+      context 'when protected', quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/339727', type: :bug } do
         before do
           add_members_to_project(project)
 
@@ -21,14 +21,16 @@ RSpec.describe Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTabl
     tagging_2 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_2.id)
     other_tagging = taggings.create!(taggable_type: 'Other', taggable_id: project.id, context: 'topics', tag_id: tag_1.id)
     tagging_3 = taggings.create!(taggable_type: 'Project', taggable_id: project.id, context: 'topics', tag_id: tag_3.id)
+    tagging_4 = taggings.create!(taggable_type: 'Project', taggable_id: -1, context: 'topics', tag_id: tag_1.id)
 
-    subject.perform(tagging_1.id, tagging_3.id)
+    subject.perform(tagging_1.id, tagging_4.id)
 
     # Tagging records
     expect { tagging_1.reload }.to raise_error(ActiveRecord::RecordNotFound)
     expect { tagging_2.reload }.to raise_error(ActiveRecord::RecordNotFound)
     expect { other_tagging.reload }.not_to raise_error(ActiveRecord::RecordNotFound)
     expect { tagging_3.reload }.to raise_error(ActiveRecord::RecordNotFound)
+    expect { tagging_4.reload }.to raise_error(ActiveRecord::RecordNotFound)
 
     # Topic records
     topic_1 = topics.find_by(name: 'Topic1')
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::BackgroundMigration::StealMigrateMergeRequestDiffCommitUs
   end
 
   describe '#schedule_next_job' do
-    it 'schedules the next job in reverse order' do
+    it 'schedules the next job in ascending order' do
       Gitlab::Database::BackgroundMigrationJob.create!(
         class_name: 'MigrateMergeRequestDiffCommitUsers',
         arguments: [10, 20]

@@ -36,7 +36,7 @@ RSpec.describe Gitlab::BackgroundMigration::StealMigrateMergeRequestDiffCommitUs
 
       expect(BackgroundMigrationWorker)
         .to receive(:perform_in)
-        .with(5.minutes, 'StealMigrateMergeRequestDiffCommitUsers', [40, 50])
+        .with(5.minutes, 'StealMigrateMergeRequestDiffCommitUsers', [10, 20])
 
       migration.schedule_next_job
     end
@@ -44,6 +44,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CancelPendingPipelines do
       expect(build_statuses(pipeline)).to contain_exactly('pending')
     end
 
+    it 'cancels the builds with 2 queries to avoid query timeout' do
+      second_query_regex = /WHERE "ci_pipelines"\."id" = \d+ AND \(NOT EXISTS/
+      recorder = ActiveRecord::QueryRecorder.new { perform }
+      second_query = recorder.occurrences.keys.filter { |occ| occ =~ second_query_regex }
+
+      expect(second_query).to be_one
+    end
+
     context 'when the previous pipeline has a child pipeline' do
       let(:child_pipeline) { create(:ci_pipeline, child_of: prev_pipeline) }
 
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe RemoveDuplicateDastSiteTokensWithSameToken do
+  let(:namespaces) { table(:namespaces) }
+  let(:projects) { table(:projects) }
+  let(:dast_site_tokens) { table(:dast_site_tokens) }
+  let!(:namespace) { namespaces.create!(id: 1, name: 'group', path: 'group') }
+  let!(:project1) { projects.create!(id: 1, namespace_id: namespace.id, path: 'project1') }
+  # create non duplicate dast site token
+  let!(:dast_site_token1) { dast_site_tokens.create!(project_id: project1.id, url: 'https://gitlab.com', token: SecureRandom.uuid) }
+
+  context 'when duplicate dast site tokens exists' do
+    # create duplicate dast site token
+    let_it_be(:duplicate_token) { 'duplicate_token' }
+    let_it_be(:other_duplicate_token) { 'other_duplicate_token' }
+
+    let!(:project2) { projects.create!(id: 2, namespace_id: namespace.id, path: 'project2') }
+    let!(:dast_site_token2) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab2.com', token: duplicate_token) }
+    let!(:dast_site_token3) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab3.com', token: duplicate_token) }
+    let!(:dast_site_token4) { dast_site_tokens.create!(project_id: project2.id, url: 'https://gitlab4.com', token: duplicate_token) }
+
+    let!(:project3) { projects.create!(id: 3, namespace_id: namespace.id, path: 'project3') }
+    let!(:dast_site_token5) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab2.com', token: other_duplicate_token) }
+    let!(:dast_site_token6) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab3.com', token: other_duplicate_token) }
+    let!(:dast_site_token7) { dast_site_tokens.create!(project_id: project3.id, url: 'https://gitlab4.com', token: other_duplicate_token) }
+
+    describe 'migration up' do
+      it 'does remove duplicated dast site tokens with the same token' do
+        expect(dast_site_tokens.count).to eq(7)
+        expect(dast_site_tokens.where(token: duplicate_token).size).to eq(3)
+
+        migrate!
+
+        expect(dast_site_tokens.count).to eq(3)
+        expect(dast_site_tokens.where(token: duplicate_token).size).to eq(1)
+      end
+    end
+  end
+
+  context 'when duplicate dast site tokens do not exist' do
+    let!(:dast_site_token5) { dast_site_tokens.create!(project_id: 1, url: 'https://gitlab5.com', token: SecureRandom.uuid) }
+
+    describe 'migration up' do
+      it 'does not remove any dast site tokens' do
+        expect { migrate! }.not_to change(dast_site_tokens, :count)
+      end
+    end
+  end
+end
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration! 'slice_merge_request_diff_commit_migrations'
+
+RSpec.describe SliceMergeRequestDiffCommitMigrations, :migration do
+  let(:migration) { described_class.new }
+
+  describe '#up' do
+    context 'when there are no jobs to process' do
+      it 'does nothing' do
+        expect(migration).not_to receive(:migrate_in)
+        expect(Gitlab::Database::BackgroundMigrationJob).not_to receive(:create!)
+
+        migration.up
+      end
+    end
+
+    context 'when there are pending jobs' do
+      let!(:job1) do
+        Gitlab::Database::BackgroundMigrationJob.create!(
+          class_name: described_class::MIGRATION_CLASS,
+          arguments: [1, 10_001]
+        )
+      end
+
+      let!(:job2) do
+        Gitlab::Database::BackgroundMigrationJob.create!(
+          class_name: described_class::MIGRATION_CLASS,
+          arguments: [10_001, 20_001]
+        )
+      end
+
+      it 'marks the old jobs as finished' do
+        migration.up
+
+        job1.reload
+        job2.reload
+
+        expect(job1).to be_succeeded
+        expect(job2).to be_succeeded
+      end
+
+      it 'the jobs are slices into smaller ranges' do
+        migration.up
+
+        new_jobs = Gitlab::Database::BackgroundMigrationJob
+          .for_migration_class(described_class::MIGRATION_CLASS)
+          .pending
+          .to_a
+
+        expect(new_jobs.map(&:arguments)).to eq([
+          [1, 5_001],
+          [5_001, 10_001],
+          [10_001, 15_001],
+          [15_001, 20_001]
+        ])
+      end
+
+      it 'schedules a background migration for the first job' do
+        expect(migration)
+          .to receive(:migrate_in)
+          .with(1.hour, described_class::STEAL_MIGRATION_CLASS, [1, 5_001])
+
+        migration.up
+      end
+    end
+  end
+end
@@ -183,6 +183,28 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
     end
   end
 
+  describe '.where_not_sha' do
+    let_it_be(:pipeline) { create(:ci_pipeline, sha: 'abcx') }
+    let_it_be(:pipeline_2) { create(:ci_pipeline, sha: 'abc') }
+
+    let(:sha) { 'abc' }
+
+    subject { described_class.where_not_sha(sha) }
+
+    it 'returns the pipeline without the specified sha' do
+      is_expected.to contain_exactly(pipeline)
+    end
+
+    context 'when argument is array' do
+      let(:sha) { %w[abc abcx] }
+
+      it 'returns the pipelines without the specified shas' do
+        pipeline_3 = create(:ci_pipeline, sha: 'abcy')
+        is_expected.to contain_exactly(pipeline_3)
+      end
+    end
+  end
+
   describe '.for_source_sha' do
     subject { described_class.for_source_sha(source_sha) }
 
@@ -69,15 +69,10 @@ RSpec.describe FeatureFlags::CreateService do
       end
 
       it 'creates audit event' do
-        expected_message = 'Created feature flag feature_flag '\
-                           'with description "description". '\
-                           'Created strategy "default" with scopes '\
-                           '"*". '\
-                           'Created strategy "default" with scopes '\
-                           '"production".'
-
         expect { subject }.to change { AuditEvent.count }.by(1)
-        expect(AuditEvent.last.details[:custom_message]).to eq(expected_message)
+        expect(AuditEvent.last.details[:custom_message]).to start_with('Created feature flag feature_flag with description "description".')
+        expect(AuditEvent.last.details[:custom_message]).to include('Created strategy "default" with scopes "*".')
+        expect(AuditEvent.last.details[:custom_message]).to include('Created strategy "default" with scopes "production".')
       end
 
      context 'when user is reporter' do
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+require 'erb'
+
+module Deprecations
+  module Docs
+    module_function
+
+    def path
+      Rails.root.join("doc/update/deprecations.md")
+    end
+
+    def render
+      deprecations_yaml_glob = Rails.root.join("data/deprecations/**/*.yml")
+
+      source_files = Rake::FileList.new(deprecations_yaml_glob) do |fl|
+        fl.exclude(/example\.yml$/)
+      end
+
+      deprecations = source_files.flat_map do |file|
+        YAML.load_file(file)
+      end
+
+      deprecations = VersionSorter.rsort(deprecations) { |d| d["removal_milestone"] }
+
+      milestones = deprecations.map { |d| d["removal_milestone"] }.uniq
+
+      template = Rails.root.join("data/deprecations/templates/_deprecation_template.md.erb")
+
+      load_template(template)
+        .result_with_hash(deprecations: deprecations, milestones: milestones)
+    end
+
+    def load_template(filename)
+      ERB.new(File.read(filename), trim_mode: '-')
+    end
+  end
+end
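
A short usage sketch for the new module, mirroring how the rake tasks earlier in this commit consume it (it assumes a loaded GitLab Rails environment, since `path` and `render` rely on `Rails.root`):

```ruby
# Render the Markdown from the data/deprecations YAML files.
contents = Deprecations::Docs.render

# compile_deprecations: write the generated page to doc/update/deprecations.md.
File.write(Deprecations::Docs.path, contents)

# check_deprecations: compare the committed page against a fresh render.
up_to_date = File.read(Deprecations::Docs.path) == Deprecations::Docs.render
puts(up_to_date ? "Deprecations doc is up to date." : "Deprecations doc is outdated.")
```
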
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-require 'erb'
-
-module Deprecations
-  module Docs
-    module Renderer
-      module_function
-
-      def render(**variables)
-        template = File.expand_path("data/deprecations/templates/_deprecation_template.md.erb", "#{__dir__}/../../..")
-
-        load_template(template).result_with_hash(variables)
-      end
-
-      def load_template(filename)
-        ERB.new(File.read(filename), trim_mode: '-')
-      end
-    end
-  end
-end