Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-03-27 12:14:08 +00:00
parent ff3955ef8e
commit e9de373448
40 changed files with 556 additions and 97 deletions

View File

@ -12,8 +12,14 @@ const language = {
// comment
[/^#.*$/, 'comment'],
// optional approval
[/^\^/, 'constant.numeric'],
// number of approvers
[/\[\d+\]$/, 'constant.numeric'],
// section
[/^\^\[[\s\S]+\]$/, 'namespace'],
[/\[(?!\d+\])[^\]]+\]/, 'namespace'],
// pattern
[/^\s*(\S+)/, 'regexp'],

View File

@ -59,7 +59,7 @@ export default {
:class="wrapperClasses"
v-on="$listeners"
>
<div v-if="isLoading" class="gl-w-full mr-conflict-loader">
<div v-if="isLoading" class="gl-w-full mr-state-loader">
<slot name="loading">
<div class="gl-display-flex">
<status-icon status="loading" />

View File

@ -133,10 +133,10 @@ export default {
<template>
<state-container :mr="mr" status="scheduled" :is-loading="loading" :actions="actions">
<template #loading>
<gl-skeleton-loader :width="334" :height="30">
<rect x="0" y="3" width="24" height="24" rx="4" />
<rect x="32" y="7" width="150" height="16" rx="4" />
<rect x="190" y="7" width="144" height="16" rx="4" />
<gl-skeleton-loader :width="334" :height="24">
<rect x="0" y="0" width="24" height="24" rx="4" />
<rect x="32" y="2" width="150" height="20" rx="4" />
<rect x="190" y="2" width="144" height="20" rx="4" />
</gl-skeleton-loader>
</template>
<template v-if="!loading">

View File

@ -74,10 +74,10 @@ export default {
<template>
<state-container :mr="mr" status="failed" :is-loading="isLoading">
<template #loading>
<gl-skeleton-loader :width="334" :height="30">
<rect x="0" y="7" width="150" height="16" rx="4" />
<rect x="158" y="7" width="84" height="16" rx="4" />
<rect x="250" y="7" width="84" height="16" rx="4" />
<gl-skeleton-loader :width="334" :height="24">
<rect x="0" y="0" width="24" height="24" rx="4" />
<rect x="32" y="2" width="150" height="20" rx="4" />
<rect x="190" y="2" width="144" height="20" rx="4" />
</gl-skeleton-loader>
</template>
<template v-if="!isLoading">

View File

@ -197,9 +197,9 @@ export default {
<div>
<state-container :mr="mr" :status="status" :is-loading="isLoading">
<template #loading>
<gl-skeleton-loader :width="334" :height="30">
<rect x="0" y="3" width="24" height="24" rx="4" />
<rect x="32" y="5" width="302" height="20" rx="4" />
<gl-skeleton-loader :width="334" :height="24">
<rect x="0" y="0" width="24" height="24" rx="4" />
<rect x="32" y="2" width="302" height="20" rx="4" />
</gl-skeleton-loader>
</template>
<template v-if="!isLoading">

View File

@ -508,11 +508,13 @@ export default {
>
<div v-if="loading" class="mr-widget-body">
<div class="gl-w-full mr-ready-to-merge-loader">
<gl-skeleton-loader :width="418" :height="30">
<rect x="0" y="3" width="24" height="24" rx="4" />
<rect x="32" y="0" width="70" height="30" rx="4" />
<rect x="110" y="7" width="150" height="16" rx="4" />
<rect x="268" y="7" width="150" height="16" rx="4" />
<gl-skeleton-loader :width="418" :height="86">
<rect x="0" y="0" width="144" height="20" rx="4" />
<rect x="0" y="26" width="100" height="16" rx="4" />
<rect x="108" y="26" width="100" height="16" rx="4" />
<rect x="0" y="48" width="130" height="16" rx="4" />
<rect x="0" y="70" width="80" height="16" rx="4" />
<rect x="88" y="70" width="90" height="16" rx="4" />
</gl-skeleton-loader>
</div>
</div>

View File

@ -1241,3 +1241,11 @@ $tabs-holder-z-index: 250;
}
}
}
// Constrains the merge-request state skeleton loader to the widget's
// width and vertically centers its inline SVG against adjacent content.
.mr-state-loader {
max-width: 334px;
svg {
vertical-align: middle;
}
}

View File

@ -138,6 +138,8 @@ class RegistrationsController < Devise::RegistrationsController
if identity_verification_enabled?
session[:verification_user_id] = resource.id # This is needed to find the user on the identity verification page
User.sticking.stick_or_unstick_request(request.env, :user, resource.id)
return identity_verification_redirect_path
end

View File

@ -8,3 +8,5 @@ description: Package tracking events (deprecated)
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/41846
milestone: '13.5'
gitlab_schema: gitlab_main
removed_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/114947
removed_in_milestone: '15.11'

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Enables loose-foreign-key tracking for push_rules: a database trigger
# records deleted rows so records referencing them can be cleaned up
# asynchronously by the loose-foreign-keys worker.
class TrackPushRulesDeletions < Gitlab::Database::Migration[2.1]
include Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers
enable_lock_retries!
def up
track_record_deletions(:push_rules)
end
def down
untrack_record_deletions(:push_rules)
end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
# Enables loose-foreign-key tracking for tags: a database trigger
# records deleted rows so records referencing them can be cleaned up
# asynchronously by the loose-foreign-keys worker.
class TrackTagsDeletions < Gitlab::Database::Migration[2.1]
include Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers
enable_lock_retries!
def up
track_record_deletions(:tags)
end
def down
untrack_record_deletions(:tags)
end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
# Removes the foreign key from packages_events.package_id to
# packages_packages, ahead of dropping the packages_events table.
class RemovePackagesEventsPackageIdFk < Gitlab::Database::Migration[2.1]
FK_NAME = 'fk_rails_c6c20d0094'
SOURCE_TABLE = :packages_events
TARGET_TABLE = :packages_packages
COLUMN = :package_id
disable_ddl_transaction!
# Drops the FK if present; reverse_lock_order acquires the lock on the
# target table first to reduce deadlock risk with concurrent writers.
def up
with_lock_retries do
remove_foreign_key_if_exists(
SOURCE_TABLE,
TARGET_TABLE,
name: FK_NAME,
reverse_lock_order: true
)
end
end
# Recreates the FK concurrently under its original name, with the
# previous ON DELETE SET NULL behavior.
def down
add_concurrent_foreign_key(
SOURCE_TABLE,
TARGET_TABLE,
name: FK_NAME,
column: COLUMN,
on_delete: :nullify
)
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
# Drops the packages_events table; package tracking events were
# deprecated and this table is no longer written to.
class DropPackagesEventsTable < Gitlab::Database::Migration[2.1]
def up
drop_table :packages_events, if_exists: true
end
# Recreates the table with its previous column layout so the migration
# stays reversible.
def down
return if table_exists?(:packages_events)
create_table :packages_events do |t| # rubocop:disable Migration/SchemaAdditionMethodsNoPost
t.integer :event_type, limit: 2, null: false
t.integer :event_scope, limit: 2, null: false
t.integer :originator_type, limit: 2, null: false
t.bigint :originator
t.datetime_with_timezone :created_at, null: false
t.references :package,
index: true,
foreign_key: { to_table: :packages_packages, on_delete: :nullify },
type: :bigint
end
end
end

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
# Blocks until the batched background migration copying
# snippet_user_mentions.note_id into note_id_convert_to_bigint has
# finished, so the subsequent column-swap migration can run safely.
class EnsureSnippetUserMentionsBigintBackfillIsFinishedForGitlabDotCom < Gitlab::Database::Migration[2.1]
include Gitlab::Database::MigrationHelpers::ConvertToBigint
restrict_gitlab_migration gitlab_schema: :gitlab_main
disable_ddl_transaction!
def up
# Only enforced on GitLab.com, dev, or test instances (not JH) —
# presumably the backfill is only scheduled there.
return unless should_run?
ensure_batched_background_migration_is_finished(
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: 'snippet_user_mentions',
column_name: 'id',
job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
)
end
def down
# no-op
end
private
def should_run?
com_or_dev_or_test_but_not_jh?
end
end

View File

@ -0,0 +1,74 @@
# frozen_string_literal: true
# Swaps snippet_user_mentions.note_id (integer) with its bigint shadow
# column note_id_convert_to_bigint, migrating the indexes and foreign
# key along with it. Runs only on GitLab.com, dev, or test (not JH).
class SwapSnippetUserMentionsNoteIdToBigintForGitlabDotCom < Gitlab::Database::Migration[2.1]
include Gitlab::Database::MigrationHelpers::ConvertToBigint
disable_ddl_transaction!
TABLE_NAME = 'snippet_user_mentions'
def up
return unless should_run?
swap
end
# Swapping a second time reverses the rename, so #down reuses #up's
# implementation.
def down
return unless should_run?
swap
end
def swap
# Build the replacement indexes/FK concurrently BEFORE taking the
# exclusive lock, so the locked section only does fast renames.
# This will replace the existing index_snippet_user_mentions_on_note_id
add_concurrent_index TABLE_NAME, :note_id_convert_to_bigint, unique: true,
name: 'index_snippet_user_mentions_on_note_id_convert_to_bigint',
where: 'note_id_convert_to_bigint IS NOT NULL'
# This will replace the existing snippet_user_mentions_on_snippet_id_and_note_id_index
add_concurrent_index TABLE_NAME, [:snippet_id, :note_id_convert_to_bigint], unique: true,
name: 'tmp_snippet_user_mentions_on_snippet_id_and_note_id_index'
# This will replace the existing snippet_user_mentions_on_snippet_id_index
add_concurrent_index TABLE_NAME, :snippet_id, unique: true,
name: 'tmp_snippet_user_mentions_on_snippet_id_index',
where: 'note_id_convert_to_bigint IS NULL'
# This will replace the existing fk_rails_4d3f96b2cb
add_concurrent_foreign_key TABLE_NAME, :notes, column: :note_id_convert_to_bigint,
name: 'fk_snippet_user_mentions_note_id_convert_to_bigint',
on_delete: :cascade
# Exclusive lock so the three-way column rename and the index/FK
# renames are atomic with respect to concurrent writers.
with_lock_retries(raise_on_exhaustion: true) do
execute "LOCK TABLE notes, #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id TO note_id_tmp"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id_convert_to_bigint TO note_id"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN note_id_tmp TO note_id_convert_to_bigint"
# Reset per-function settings on the copy trigger's function after the
# rename (presumably to clear stale planner settings — TODO confirm).
function_name = Gitlab::Database::UnidirectionalCopyTrigger
.on_table(TABLE_NAME, connection: connection)
.name(:note_id, :note_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
# Drop the old indexes and give the replacements the original names.
execute 'DROP INDEX IF EXISTS index_snippet_user_mentions_on_note_id'
rename_index TABLE_NAME, 'index_snippet_user_mentions_on_note_id_convert_to_bigint',
'index_snippet_user_mentions_on_note_id'
execute 'DROP INDEX IF EXISTS snippet_user_mentions_on_snippet_id_and_note_id_index'
rename_index TABLE_NAME, 'tmp_snippet_user_mentions_on_snippet_id_and_note_id_index',
'snippet_user_mentions_on_snippet_id_and_note_id_index'
execute 'DROP INDEX IF EXISTS snippet_user_mentions_on_snippet_id_index'
rename_index TABLE_NAME, 'tmp_snippet_user_mentions_on_snippet_id_index',
'snippet_user_mentions_on_snippet_id_index'
# Same treatment for the foreign key: drop the old one, rename the new.
execute "ALTER TABLE #{TABLE_NAME} DROP CONSTRAINT IF EXISTS fk_rails_4d3f96b2cb"
rename_constraint(TABLE_NAME, 'fk_snippet_user_mentions_note_id_convert_to_bigint', 'fk_rails_4d3f96b2cb')
end
end
def should_run?
com_or_dev_or_test_but_not_jh?
end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
# Data fix: nulls out application_settings.push_rule_id values that
# point at push_rules rows which no longer exist (orphaned references).
class FixApplicationSettingPushRuleIdFk < Gitlab::Database::Migration[2.1]
restrict_gitlab_migration gitlab_schema: :gitlab_main
# This migration fixes missing `track_record_deletions(:push_rules)`
# where the `application_settings.push_rule_id` would not be reset
# after removing push rule.
def up
execute <<~SQL
UPDATE application_settings SET push_rule_id=NULL
WHERE push_rule_id IS NOT NULL AND NOT EXISTS (
SELECT * FROM push_rules WHERE push_rules.id = application_settings.push_rule_id
)
SQL
end
# The overwritten values cannot be recovered; intentionally a no-op.
def down; end
end

View File

@ -0,0 +1 @@
d9a660e9415a0ab3f128609d72b2da19982d1ef6a8441dd2f627427890d0d3be

View File

@ -0,0 +1 @@
6aa9022f0e945d66fd62a49e1d392a8fb5e55bd96804b1eb50bfff135a7954b6

View File

@ -0,0 +1 @@
5e70b9a4dd74523e8cc44b22d2ebb5dcc326fcfb52bacdd63090b5abe8b36732

View File

@ -0,0 +1 @@
b00bad4559b3f7a959771b08f03b2d6cd4eadb2862dc8840065885cd87d09a77

View File

@ -0,0 +1 @@
df9cd2f72ada861f94a7957513982c50276fbe8bf29f82f29bbc306cf9075d9b

View File

@ -0,0 +1 @@
5c572b7d9ee5f457c5bdd49fea89143eefd40680c571942b68e370857cf3af0e

View File

@ -0,0 +1 @@
9a513d1ac05a263b7057fbdcc3640d9269263dcb620ee7eea79888a0afec136c

View File

@ -19503,25 +19503,6 @@ CREATE SEQUENCE packages_dependency_links_id_seq
ALTER SEQUENCE packages_dependency_links_id_seq OWNED BY packages_dependency_links.id;
CREATE TABLE packages_events (
id bigint NOT NULL,
event_type smallint NOT NULL,
event_scope smallint NOT NULL,
originator_type smallint NOT NULL,
originator bigint,
created_at timestamp with time zone NOT NULL,
package_id bigint
);
CREATE SEQUENCE packages_events_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE packages_events_id_seq OWNED BY packages_events.id;
CREATE TABLE packages_helm_file_metadata (
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
@ -22426,11 +22407,11 @@ CREATE TABLE snippet_statistics (
CREATE TABLE snippet_user_mentions (
id bigint NOT NULL,
snippet_id integer NOT NULL,
note_id integer,
note_id_convert_to_bigint integer,
mentioned_users_ids integer[],
mentioned_projects_ids integer[],
mentioned_groups_ids integer[],
note_id_convert_to_bigint bigint
note_id bigint
);
CREATE SEQUENCE snippet_user_mentions_id_seq
@ -25125,8 +25106,6 @@ ALTER TABLE ONLY packages_dependencies ALTER COLUMN id SET DEFAULT nextval('pack
ALTER TABLE ONLY packages_dependency_links ALTER COLUMN id SET DEFAULT nextval('packages_dependency_links_id_seq'::regclass);
ALTER TABLE ONLY packages_events ALTER COLUMN id SET DEFAULT nextval('packages_events_id_seq'::regclass);
ALTER TABLE ONLY packages_maven_metadata ALTER COLUMN id SET DEFAULT nextval('packages_maven_metadata_id_seq'::regclass);
ALTER TABLE ONLY packages_package_file_build_infos ALTER COLUMN id SET DEFAULT nextval('packages_package_file_build_infos_id_seq'::regclass);
@ -27350,9 +27329,6 @@ ALTER TABLE ONLY packages_dependencies
ALTER TABLE ONLY packages_dependency_links
ADD CONSTRAINT packages_dependency_links_pkey PRIMARY KEY (id);
ALTER TABLE ONLY packages_events
ADD CONSTRAINT packages_events_pkey PRIMARY KEY (id);
ALTER TABLE ONLY packages_helm_file_metadata
ADD CONSTRAINT packages_helm_file_metadata_pkey PRIMARY KEY (package_file_id);
@ -31329,8 +31305,6 @@ CREATE UNIQUE INDEX index_packages_dependencies_on_name_and_version_pattern ON p
CREATE INDEX index_packages_dependency_links_on_dependency_id ON packages_dependency_links USING btree (dependency_id);
CREATE INDEX index_packages_events_on_package_id ON packages_events USING btree (package_id);
CREATE INDEX index_packages_helm_file_metadata_on_channel ON packages_helm_file_metadata USING btree (channel);
CREATE INDEX index_packages_helm_file_metadata_on_pf_id_and_channel ON packages_helm_file_metadata USING btree (package_file_id, channel);
@ -34089,6 +34063,10 @@ CREATE TRIGGER nullify_merge_request_metrics_build_data_on_update BEFORE UPDATE
CREATE TRIGGER projects_loose_fk_trigger AFTER DELETE ON projects REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER push_rules_loose_fk_trigger AFTER DELETE ON push_rules REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER tags_loose_fk_trigger AFTER DELETE ON tags REFERENCING OLD TABLE AS old_table FOR EACH STATEMENT EXECUTE FUNCTION insert_into_loose_foreign_keys_deleted_records();
CREATE TRIGGER trigger_080e73845bfd BEFORE INSERT OR UPDATE ON notes FOR EACH ROW EXECUTE FUNCTION trigger_080e73845bfd();
CREATE TRIGGER trigger_0e214b8a14f2 BEFORE INSERT OR UPDATE ON vulnerability_user_mentions FOR EACH ROW EXECUTE FUNCTION trigger_0e214b8a14f2();
@ -36481,9 +36459,6 @@ ALTER TABLE ONLY boards_epic_board_recent_visits
ALTER TABLE ONLY ci_job_artifacts
ADD CONSTRAINT fk_rails_c5137cb2c1_p FOREIGN KEY (partition_id, job_id) REFERENCES ci_builds(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY packages_events
ADD CONSTRAINT fk_rails_c6c20d0094 FOREIGN KEY (package_id) REFERENCES packages_packages(id) ON DELETE SET NULL;
ALTER TABLE ONLY project_settings
ADD CONSTRAINT fk_rails_c6df6e6328 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

View File

@ -304,7 +304,7 @@ If your **primary** site is using a [custom or self-signed certificate for inbou
sudo gitlab-ctl reconfigure
```
### Step 5. Enable Git access over HTTP/HTTPS
### Step 5. Enable Git access over HTTP/HTTPS and SSH
Geo synchronizes repositories over HTTP/HTTPS, and therefore requires this clone
method to be enabled. This is enabled by default, but if converting an existing site to Geo it should be checked:
@ -314,7 +314,10 @@ On the **primary** site:
1. On the top bar, select **Main menu > Admin**.
1. On the left sidebar, select **Settings > General**.
1. Expand **Visibility and access controls**.
1. Ensure "Enabled Git access protocols" is set to either "Both SSH and HTTP(S)" or "Only HTTP(S)".
1. If using Git over SSH, then:
1. Ensure "Enabled Git access protocols" is set to "Both SSH and HTTP(S)".
1. Follow [Fast lookup of authorized SSH keys in the database](../../operations/fast_ssh_key_lookup.md) on both primary and secondary sites.
1. If not using Git over SSH, then set "Enabled Git access protocols" to "Only HTTP(S)".
### Step 6. Verify proper functioning of the **secondary** site

View File

@ -24,7 +24,6 @@ type: howto
If you installed GitLab using the Omnibus packages (highly recommended):
1. [Set up the database replication](database.md) (`primary (read-write) <-> secondary (read-only)` topology).
1. [Configure fast lookup of authorized SSH keys in the database](../../operations/fast_ssh_key_lookup.md). This step is required and needs to be done on **both** the **primary** and **secondary** sites.
1. [Configure GitLab](../replication/configuration.md) to set the **primary** and **secondary** sites.
1. Optional: [Configure Object storage](../../object_storage.md)
1. Optional: [Configure a secondary LDAP server](../../auth/ldap/index.md) for the **secondary** sites. See [notes on LDAP](../index.md#ldap).

View File

@ -72,13 +72,13 @@ With standalone setups, especially single node environments, there are [various
### High Availability (HA)
High Availability ensures every component in the GitLab setup can handle failures through various mechanisms. To achieve this however is complex, and the environments required can be sizable.
High Availability ensures every component in the GitLab setup can handle failures through various mechanisms. However, to achieve this is complex and the environments required can be sizable.
For environments serving 3,000 or more users we generally recommend that a HA strategy is used as at this level outages have a bigger impact against more users. All the architectures in this range have HA built in by design for this reason.
#### Do you need High Availability (HA)?
As mentioned above, achieving HA does come at a cost. The environment's required are sizable as each component needs to be multiplied, which comes with additional actual and maintenance costs.
As mentioned above, achieving HA does come at a cost. The environment requirements are sizable as each component needs to be multiplied, which comes with additional actual and maintenance costs.
For a lot of our customers with fewer than 3,000 users, we've found a backup strategy is sufficient and even preferable. While this does have a slower recovery time, it also means you have a much smaller architecture and less maintenance costs as a result.
@ -95,9 +95,9 @@ If you still need to have HA for a lower number of users, this can be achieved w
[Zero Downtime Upgrades](../../update/zero_downtime.md) are available for standard Reference Architecture environments with HA (Cloud Native Hybrid is not supported at this time). This allows for an environment to stay up during an upgrade, but the process is more complex as a result and has some limitations as detailed in the documentation.
When going through this process it's worth noting that there may still be brief moments of downtime when the HA mechanisms tale effect.
When going through this process it's worth noting that there may still be brief moments of downtime when the HA mechanisms take effect.
In most cases the downtime required for doing an upgrade in general shouldn't be substantial, so this is only recommended if it's a key requirement for you.
In most cases the downtime required for doing an upgrade shouldn't be substantial, so this is only recommended if it's a key requirement for you.
### Cloud Native Hybrid (Kubernetes HA)
@ -239,8 +239,8 @@ for more information and guidance.
[Praefect requires its own database server](../gitaly/praefect.md#postgresql) and
that to achieve full High Availability, a third-party PostgreSQL database solution is required.
We hope to offer a built in solutions for these restrictions in the future but, in the meantime, a non HA PostgreSQL server
can be set up using Omnibus GitLab, the specifications reflect. Refer to the following issues for more information:
We hope to offer a built-in solution for these restrictions in the future. In the meantime, a non-HA PostgreSQL server
can be set up using Omnibus GitLab as the specifications reflect. Refer to the following issues for more information:
- [`omnibus-gitlab#5919`](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/5919).
- [`gitaly#3398`](https://gitlab.com/gitlab-org/gitaly/-/issues/3398).

View File

@ -8,7 +8,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
WARNING:
These are advanced settings. While they are used on GitLab.com, most GitLab
instances should add more processes that all listen to all queues. This is the
instances should only add more processes that listen to all queues. This is the
same approach we take in our [Reference Architectures](../reference_architectures/index.md).
GitLab has two options for creating Sidekiq processes that only handle specific

View File

@ -353,9 +353,17 @@ to create an incremental backup from:
To create an incremental backup, run:
```shell
sudo gitlab-backup create INCREMENTAL=yes PREVIOUS_BACKUP=<timestamp_of_backup>
```
- In GitLab 15.0 or later:
```shell
sudo gitlab-backup create INCREMENTAL=yes PREVIOUS_BACKUP=<timestamp_of_backup>
```
- In GitLab 14.9 and 14.10:
```shell
sudo gitlab-backup create INCREMENTAL=yes BACKUP=<timestamp_of_backup>
```
To create an [untarred](#skipping-tar-creation) incremental backup from a tarred backup, use `SKIP=tar`:

View File

@ -2868,6 +2868,9 @@ msgstr ""
msgid "AdminSettings|Clickhouse URL"
msgstr ""
msgid "AdminSettings|Collector host"
msgstr ""
msgid "AdminSettings|Configure Let's Encrypt"
msgstr ""
@ -3159,6 +3162,9 @@ msgstr ""
msgid "AdminSettings|The host of your Jitsu instance."
msgstr ""
msgid "AdminSettings|The host of your data collector instance."
msgstr ""
msgid "AdminSettings|The latest artifacts for all jobs in the most recent successful pipelines in each project are stored and do not expire."
msgstr ""

View File

@ -19,7 +19,66 @@ describe('tokenization for CODEOWNERS files', () => {
],
],
],
['^[Section name]', [[{ language: 'codeowners', offset: 0, type: 'namespace.codeowners' }]]],
[
'^[Section name]',
[
[
{ language: 'codeowners', offset: 0, type: 'constant.numeric.codeowners' },
{ language: 'codeowners', offset: 1, type: 'namespace.codeowners' },
],
],
],
[
'[Section name][3]',
[
[
{ language: 'codeowners', offset: 0, type: 'namespace.codeowners' },
{ language: 'codeowners', offset: 14, type: 'constant.numeric.codeowners' },
],
],
],
[
'[Section name][30]',
[
[
{ language: 'codeowners', offset: 0, type: 'namespace.codeowners' },
{ language: 'codeowners', offset: 14, type: 'constant.numeric.codeowners' },
],
],
],
[
'^[Section name][3]',
[
[
{ language: 'codeowners', offset: 0, type: 'constant.numeric.codeowners' },
{ language: 'codeowners', offset: 1, type: 'namespace.codeowners' },
{ language: 'codeowners', offset: 15, type: 'constant.numeric.codeowners' },
],
],
],
[
'^[Section-name-test][3]',
[
[
{ language: 'codeowners', offset: 0, type: 'constant.numeric.codeowners' },
{ language: 'codeowners', offset: 1, type: 'namespace.codeowners' },
{ language: 'codeowners', offset: 20, type: 'constant.numeric.codeowners' },
],
],
],
[
'[Section-name_test]',
[[{ language: 'codeowners', offset: 0, type: 'namespace.codeowners' }]],
],
[
'[2 Be or not 2 be][3]',
[
[
{ language: 'codeowners', offset: 0, type: 'namespace.codeowners' },
{ language: 'codeowners', offset: 18, type: 'constant.numeric.codeowners' },
],
],
],
])('%s', (string, tokens) => {
expect(editor.tokenize(string, 'codeowners')).toEqual(tokens);
});

View File

@ -1,5 +1,6 @@
import { GlButton } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import BlobButtonGroup from '~/repository/components/blob_button_group.vue';
import DeleteBlobModal from '~/repository/components/delete_blob_modal.vue';
import UploadBlobModal from '~/repository/components/upload_blob_modal.vue';
@ -26,7 +27,24 @@ const DEFAULT_INJECT = {
describe('BlobButtonGroup component', () => {
let wrapper;
let showUploadBlobModalMock;
let showDeleteBlobModalMock;
const createComponent = (props = {}) => {
showUploadBlobModalMock = jest.fn();
showDeleteBlobModalMock = jest.fn();
const UploadBlobModalStub = stubComponent(UploadBlobModal, {
methods: {
show: showUploadBlobModalMock,
},
});
const DeleteBlobModalStub = stubComponent(DeleteBlobModal, {
methods: {
show: showDeleteBlobModalMock,
},
});
wrapper = mountExtended(BlobButtonGroup, {
propsData: {
...DEFAULT_PROPS,
@ -35,6 +53,10 @@ describe('BlobButtonGroup component', () => {
provide: {
...DEFAULT_INJECT,
},
stubs: {
UploadBlobModal: UploadBlobModalStub,
DeleteBlobModal: DeleteBlobModalStub,
},
});
};
@ -57,8 +79,6 @@ describe('BlobButtonGroup component', () => {
describe('buttons', () => {
beforeEach(() => {
createComponent();
jest.spyOn(findUploadBlobModal().vm, 'show');
jest.spyOn(findDeleteBlobModal().vm, 'show');
});
it('renders both the replace and delete button', () => {
@ -73,33 +93,31 @@ describe('BlobButtonGroup component', () => {
it('triggers the UploadBlobModal from the replace button', () => {
findReplaceButton().trigger('click');
expect(findUploadBlobModal().vm.show).toHaveBeenCalled();
expect(showUploadBlobModalMock).toHaveBeenCalled();
});
it('triggers the DeleteBlobModal from the delete button', () => {
findDeleteButton().trigger('click');
expect(findDeleteBlobModal().vm.show).toHaveBeenCalled();
expect(showDeleteBlobModalMock).toHaveBeenCalled();
});
describe('showForkSuggestion set to true', () => {
beforeEach(() => {
createComponent({ showForkSuggestion: true });
jest.spyOn(findUploadBlobModal().vm, 'show');
jest.spyOn(findDeleteBlobModal().vm, 'show');
});
it('does not trigger the UploadBlobModal from the replace button', () => {
findReplaceButton().trigger('click');
expect(findUploadBlobModal().vm.show).not.toHaveBeenCalled();
expect(showUploadBlobModalMock).not.toHaveBeenCalled();
expect(wrapper.emitted().fork).toHaveLength(1);
});
it('does not trigger the DeleteBlobModal from the delete button', () => {
findDeleteButton().trigger('click');
expect(findDeleteBlobModal().vm.show).not.toHaveBeenCalled();
expect(showDeleteBlobModalMock).not.toHaveBeenCalled();
expect(wrapper.emitted().fork).toHaveLength(1);
});
});

View File

@ -85,31 +85,40 @@ RSpec.describe Gitlab::Database::LooseForeignKeys do
end
end
describe '.definitions' do
subject(:definitions) { described_class.definitions }
it 'contains at least all parent tables that have triggers' do
all_definition_parent_tables = definitions.map { |d| d.to_table }.to_set
context 'all tables have correct triggers installed' do
let(:all_tables_from_yaml) { described_class.definitions.pluck(:to_table).uniq }
let(:all_tables_with_triggers) do
triggers_query = <<~SQL
SELECT event_object_table, trigger_name
FROM information_schema.triggers
SELECT event_object_table FROM information_schema.triggers
WHERE trigger_name LIKE '%_loose_fk_trigger'
GROUP BY event_object_table, trigger_name
SQL
all_triggers = ApplicationRecord.connection.execute(triggers_query)
ApplicationRecord.connection.execute(triggers_query)
.pluck('event_object_table').uniq
end
all_triggers.each do |trigger|
table = trigger['event_object_table']
trigger_name = trigger['trigger_name']
error_message = <<~END
Missing a loose foreign key definition for parent table: #{table} with trigger: #{trigger_name}.
Loose foreign key definitions must be added before triggers are added and triggers must be removed before removing the loose foreign key definition.
Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html ."
END
expect(all_definition_parent_tables).to include(table), error_message
end
it 'all YAML tables do have `track_record_deletions` installed' do
missing_trigger_tables = all_tables_from_yaml - all_tables_with_triggers
expect(missing_trigger_tables).to be_empty, <<~END
The loose foreign keys definitions require using `track_record_deletions`
for the following tables: #{missing_trigger_tables}.
Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html."
END
end
it 'no extra tables have `track_record_deletions` installed' do
extra_trigger_tables = all_tables_with_triggers - all_tables_from_yaml
pending 'This result of this test is informatory, and not critical' if extra_trigger_tables.any?
expect(extra_trigger_tables).to be_empty, <<~END
The following tables have unused `track_record_deletions` triggers installed,
but they are not referenced by any of the loose foreign key definitions: #{extra_trigger_tables}.
You can remove them in one of the future releases as part of `db/post_migrate`.
Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html."
END
end
end

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true
require "spec_helper"
require_migration!
# Verifies DropPackagesEventsTable: #up drops the packages_events table
# and #down recreates it.
#
# The previous version declared `let(:table)`/`let(:column)` referencing
# `described_class::SOURCE_TABLE` and `::COLUMN`, constants that
# DropPackagesEventsTable does not define (copied from the FK-removal
# spec); those dead lets would raise NameError if ever evaluated and
# have been removed.
RSpec.describe DropPackagesEventsTable, feature_category: :package_registry do
  subject { described_class.new }

  it 'drops and creates the packages_events table' do
    reversible_migration do |migration|
      # Before #up the table must exist; after #up it must be gone.
      migration.before -> do
        expect(subject.table_exists?(:packages_events)).to eq(true)
      end

      migration.after -> do
        expect(subject.table_exists?(:packages_events)).to eq(false)
      end
    end
  end
end

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Verifies that the backfill-completion check runs only on GitLab.com,
# dev, or test instances and is skipped everywhere else.
RSpec.describe EnsureSnippetUserMentionsBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do
describe '#up' do
# The exact arguments identifying the bigint backfill the migration
# must wait on; mirrored from the migration's #up body.
let(:migration_arguments) do
{
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: 'snippet_user_mentions',
column_name: 'id',
job_arguments: [['note_id'], ['note_id_convert_to_bigint']]
}
end
it 'ensures the migration is completed for GitLab.com, dev, or test' do
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
end
migrate!
end
it 'skips the check for other instances' do
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
end
migrate!
end
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
require "spec_helper"
require_migration!
# Verifies RemovePackagesEventsPackageIdFk: #up drops the
# packages_events.package_id foreign key and #down restores it.
RSpec.describe RemovePackagesEventsPackageIdFk, feature_category: :package_registry do
let(:table) { described_class::SOURCE_TABLE }
let(:column) { described_class::COLUMN }
# Wrapped in a lambda so the FK is re-read at call time, letting the
# before/after hooks observe the schema change rather than a cached value.
let(:foreign_key) { -> { described_class.new.foreign_keys_for(table, column).first } }
it 'drops and creates the foreign key' do
reversible_migration do |migration|
migration.before -> do
expect(foreign_key.call).to have_attributes(column: column.to_s)
end
migration.after -> do
expect(foreign_key.call).to be(nil)
end
end
end
end

View File

@ -0,0 +1,66 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
# Verifies SwapSnippetUserMentionsNoteIdToBigintForGitlabDotCom: the
# note_id / note_id_convert_to_bigint column types are swapped on
# GitLab.com-like instances and left untouched elsewhere.
RSpec.describe SwapSnippetUserMentionsNoteIdToBigintForGitlabDotCom, feature_category: :database do
describe '#up' do
before do
# As we call `schema_migrate_down!` before each example, and for this migration
# `#down` is the same as `#up`, we need to ensure we start from the expected state.
connection = described_class.new.connection
connection.execute('ALTER TABLE snippet_user_mentions ALTER COLUMN note_id TYPE integer')
connection.execute('ALTER TABLE snippet_user_mentions ALTER COLUMN note_id_convert_to_bigint TYPE bigint')
end
# rubocop: disable RSpec/AnyInstanceOf
it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
user_mentions = table(:snippet_user_mentions)
disable_migrations_output do
reversible_migration do |migration|
# Before the swap: note_id is integer, shadow column is bigint.
migration.before -> {
user_mentions.reset_column_information
expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint')
}
# After the swap: the types have been exchanged.
migration.after -> {
user_mentions.reset_column_information
expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('bigint')
expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('integer')
}
end
end
end
it 'is a no-op for other instances' do
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
user_mentions = table(:snippet_user_mentions)
disable_migrations_output do
reversible_migration do |migration|
# Column types must be identical before and after when the guard
# (com_or_dev_or_test_but_not_jh?) returns false.
migration.before -> {
user_mentions.reset_column_information
expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint')
}
migration.after -> {
user_mentions.reset_column_information
expect(user_mentions.columns.find { |c| c.name == 'note_id' }.sql_type).to eq('integer')
expect(user_mentions.columns.find { |c| c.name == 'note_id_convert_to_bigint' }.sql_type).to eq('bigint')
}
end
end
end
# rubocop: enable RSpec/AnyInstanceOf
end
end

View File

@ -7,7 +7,7 @@ require 'capybara-screenshot/rspec'
require 'selenium-webdriver'
# Give CI some extra time
timeout = ENV['CI'] || ENV['CI_SERVER'] ? 30 : 10
timeout = ENV['CI'] || ENV['CI_SERVER'] ? 45 : 10
# Support running Capybara on a specific port to allow saving commonly used pages
Capybara.server_port = ENV['CAPYBARA_PORT'] if ENV['CAPYBARA_PORT']

View File

@ -6,7 +6,7 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0
github.com/BurntSushi/toml v1.2.1
github.com/FZambia/sentinel v1.1.1
github.com/alecthomas/chroma/v2 v2.5.0
github.com/alecthomas/chroma/v2 v2.6.0
github.com/aws/aws-sdk-go v1.44.218
github.com/disintegration/imaging v1.6.2
github.com/getsentry/raven-go v0.2.0
@ -36,7 +36,7 @@ require (
golang.org/x/oauth2 v0.5.0
golang.org/x/tools v0.6.0
google.golang.org/grpc v1.53.0
google.golang.org/protobuf v1.29.1
google.golang.org/protobuf v1.30.0
honnef.co/go/tools v0.3.3
)

View File

@ -535,8 +535,8 @@ github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
github.com/alecthomas/assert/v2 v2.2.1 h1:XivOgYcduV98QCahG8T5XTezV5bylXe+lBxLG2K2ink=
github.com/alecthomas/chroma/v2 v2.5.0 h1:CQCdj1BiBV17sD4Bd32b/Bzuiq/EqoNTrnIhyQAZ+Rk=
github.com/alecthomas/chroma/v2 v2.5.0/go.mod h1:yrkMI9807G1ROx13fhe1v6PN2DDeaR73L3d+1nmYQtw=
github.com/alecthomas/chroma/v2 v2.6.0 h1:1L4tjWoEqtLbld6XHdK8ewYY51YMLOGRv21dAFHjLnQ=
github.com/alecthomas/chroma/v2 v2.6.0/go.mod h1:yrkMI9807G1ROx13fhe1v6PN2DDeaR73L3d+1nmYQtw=
github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
@ -2847,8 +2847,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.29.1 h1:7QBf+IK2gx70Ap/hDsOmam3GE0v9HicjfEdAxE62UoM=
google.golang.org/protobuf v1.29.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng=
google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/DataDog/dd-trace-go.v1 v1.32.0 h1:DkD0plWEVUB8v/Ru6kRBW30Hy/fRNBC8hPdcExuBZMc=
gopkg.in/DataDog/dd-trace-go.v1 v1.32.0/go.mod h1:wRKMf/tRASHwH/UOfPQ3IQmVFhTz2/1a1/mpXoIjF54=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=