Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-06-18 12:10:03 +00:00
parent e6779ab919
commit 38a1a6cb91
69 changed files with 11525 additions and 185 deletions

View File

@ -107,3 +107,7 @@ overrides:
import/no-nodejs-modules: off
filenames/match-regex: off
no-console: off
- files:
- '*.stories.js'
rules:
filenames/match-regex: off

View File

@ -2,6 +2,10 @@
- source scripts/utils.sh
- run_timed_command "retry yarn install --frozen-lockfile"
.storybook-yarn-install: &storybook-yarn-install
- source scripts/utils.sh
- run_timed_command "retry yarn run storybook:install --frozen-lockfile"
.compile-assets-base:
extends:
- .default-retry
@ -80,6 +84,15 @@ update-yarn-cache:
script:
- *yarn-install
update-storybook-yarn-cache:
extends:
- .default-retry
- .storybook-yarn-cache-push
- .shared:rules:update-cache
stage: prepare
script:
- *storybook-yarn-install
.frontend-fixtures-base:
extends:
- .default-retry
@ -344,3 +357,29 @@ startup-css-check as-if-foss:
needs:
- job: "compile-test-assets as-if-foss"
- job: "rspec frontend_fixture as-if-foss"
.compile-storybook-base:
extends:
- .frontend-test-base
- .storybook-yarn-cache
script:
- *yarn-install # storybook depends on the global webpack config, so we must install global deps.
- *storybook-yarn-install
- yarn run storybook:build
compile-storybook:
extends:
- .compile-storybook-base
- .frontend:rules:default-frontend-jobs
artifacts:
name: storybook
expire_in: 31d
when: always
paths:
- storybook/public
compile-storybook as-if-foss:
extends:
- .compile-storybook-base
- .as-if-foss
- .frontend:rules:default-frontend-jobs-as-if-foss

View File

@ -88,6 +88,16 @@
<<: *assets-cache
policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.
.storybook-node-modules-cache: &storybook-node-modules-cache
key: "storybook-node-modules-${NODE_ENV}-v1"
paths:
- storybook/node_modules/
policy: pull
.storybook-node-modules-cache-push: &storybook-node-modules-cache-push
<<: *storybook-node-modules-cache
policy: push # We want to rebuild the cache from scratch to ensure stale dependencies are cleaned up.
.rubocop-cache: &rubocop-cache
key: "rubocop-v1"
paths:
@ -181,6 +191,14 @@
- *node-modules-cache-push
- *assets-cache-push
.storybook-yarn-cache:
cache:
- *storybook-node-modules-cache
.storybook-yarn-cache-push:
cache:
- *storybook-node-modules-cache-push
.use-pg11:
image: "registry.gitlab.com/gitlab-org/gitlab-build-images:ruby-2.7.2.patched-golang-1.16-git-2.31-lfs-2.9-chrome-89-node-14.15-yarn-1.22-postgresql-11-graphicsmagick-1.3.36"
services:

View File

@ -8,12 +8,14 @@ pages:
- coverage-frontend
- karma
- compile-production-assets
- compile-storybook
script:
- mv public/ .public/
- mkdir public/
- mv coverage/ public/coverage-ruby/ || true
- mv coverage-frontend/ public/coverage-frontend/ || true
- mv coverage-javascript/ public/coverage-javascript/ || true
- mv storybook/public public/storybook || true
- cp .public/assets/application-*.css public/application.css || true
- cp .public/assets/application-*.css.gz public/application.css.gz || true
artifacts:

View File

@ -251,6 +251,22 @@ static-analysis as-if-foss:
- .static-analysis:rules:as-if-foss
- .as-if-foss
zeitwerk-check:
extends:
- .rails-cache
- .default-before_script
- .rails:rules:ee-and-foss-unit
variables:
BUNDLE_WITHOUT: ""
SETUP_DB: "false"
needs: []
stage: test
script:
- sed -i -e "s/config\.autoloader = :classic/config\.autoloader = :zeitwerk/" config/application.rb
- RAILS_ENV=test bundle exec rake zeitwerk:check
- RAILS_ENV=development bundle exec rake zeitwerk:check
- RAILS_ENV=production bundle exec rake zeitwerk:check
rspec migration pg12:
extends:
- .rspec-base-pg12

View File

@ -30,7 +30,7 @@ export const fetchProjects = ({ commit, state }, search) => {
if (groupId) {
// TODO (https://gitlab.com/gitlab-org/gitlab/-/issues/323331): For errors `createFlash` is called twice; in `callback` and in `Api.groupProjects`
Api.groupProjects(groupId, search, {}, callback);
Api.groupProjects(groupId, search, { order_by: 'similarity' }, callback);
} else {
// The .catch() is due to the API method not handling a rejection properly
Api.projects(search, { order_by: 'id' }, callback).catch(() => {

View File

@ -22,8 +22,16 @@ export default {
required: false,
default: '',
},
pronouns: {
type: String,
required: false,
default: '',
},
},
computed: {
hasPronouns() {
return this.pronouns !== null && this.pronouns.trim() !== '';
},
isBusy() {
return isUserBusy(this.availability);
},
@ -32,9 +40,18 @@ export default {
</script>
<template>
<span :class="containerClasses">
<gl-sprintf v-if="isBusy" :message="s__('UserAvailability|%{author} (Busy)')">
<template #author>{{ name }}</template>
<gl-sprintf :message="s__('UserAvailability|%{author} %{spanStart}(Busy)%{spanEnd}')">
<template #author
>{{ name }}
<span v-if="hasPronouns" class="gl-text-gray-500 gl-font-sm gl-font-weight-normal"
>({{ pronouns }})</span
></template
>
<template #span="{ content }"
><span v-if="isBusy" class="gl-text-gray-500 gl-font-sm gl-font-weight-normal">{{
content
}}</span>
</template>
</gl-sprintf>
<template v-else>{{ name }}</template>
</span>
</template>

View File

@ -44,6 +44,7 @@ const populateUserInfo = (user) => {
bioHtml: sanitize(userData.bio_html),
workInformation: userData.work_information,
websiteUrl: userData.website_url,
pronouns: userData.pronouns,
loaded: true,
});
}

View File

@ -0,0 +1,23 @@
/* eslint-disable @gitlab/require-i18n-strings */
import TodoButton from './todo_button.vue';
export default {
component: TodoButton,
title: 'vue_shared/components/todo_button',
};
const Template = (args, { argTypes }) => ({
components: { TodoButton },
props: Object.keys(argTypes),
template: '<todo-button v-bind="$props" v-on="$props" />',
});
export const Default = Template.bind({});
Default.argTypes = {
isTodo: {
description: 'True if to-do is unresolved (i.e. not "done")',
control: { type: 'boolean' },
},
click: { action: 'clicked' },
};

View File

@ -72,7 +72,11 @@ export default {
<template v-else>
<div class="gl-mb-3">
<h5 class="gl-m-0">
<user-name-with-status :name="user.name" :availability="availabilityStatus" />
<user-name-with-status
:name="user.name"
:availability="availabilityStatus"
:pronouns="user.pronouns"
/>
</h5>
<span class="gl-text-gray-500">@{{ user.username }}</span>
</div>

View File

@ -49,6 +49,7 @@ class Integration < ApplicationRecord
hangouts_chat
irker
packagist pipelines_email pivotaltracker pushover
mattermost mattermost_slash_commands microsoft_teams mock_ci mock_monitoring
].to_set.freeze
def self.renamed?(name)
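A brief usage sketch, under the assumption that `renamed?` tests membership in the frozen set above (the constant's name is truncated in this hunk):

```ruby
# Hypothetical console check; assumes the set above backs Integration.renamed?
Integration.renamed?('mattermost')  # => true after this change
Integration.renamed?('mock_ci')     # => true
```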

View File

@ -174,10 +174,11 @@ class Project < ApplicationRecord
has_one :irker_integration, class_name: 'Integrations::Irker'
has_one :jenkins_service, class_name: 'Integrations::Jenkins'
has_one :jira_service, class_name: 'Integrations::Jira'
has_one :mattermost_service, class_name: 'Integrations::Mattermost'
has_one :mattermost_slash_commands_service, class_name: 'Integrations::MattermostSlashCommands'
has_one :microsoft_teams_service, class_name: 'Integrations::MicrosoftTeams'
has_one :mock_ci_service, class_name: 'Integrations::MockCi'
has_one :mattermost_integration, class_name: 'Integrations::Mattermost'
has_one :mattermost_slash_commands_integration, class_name: 'Integrations::MattermostSlashCommands'
has_one :microsoft_teams_integration, class_name: 'Integrations::MicrosoftTeams'
has_one :mock_ci_integration, class_name: 'Integrations::MockCi'
has_one :mock_monitoring_integration, class_name: 'MockMonitoringService'
has_one :packagist_integration, class_name: 'Integrations::Packagist'
has_one :pipelines_email_integration, class_name: 'Integrations::PipelinesEmail'
has_one :pivotaltracker_integration, class_name: 'Integrations::Pivotaltracker'
@ -190,7 +191,6 @@ class Project < ApplicationRecord
has_one :webex_teams_service, class_name: 'Integrations::WebexTeams'
has_one :youtrack_service, class_name: 'Integrations::Youtrack'
has_one :prometheus_service, inverse_of: :project
has_one :mock_monitoring_service
has_one :root_of_fork_network,
foreign_key: 'root_project_id',
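With this rename, callers move from the `_service` association helpers to the `_integration` ones; the backing classes stay in the `Integrations::` namespace. A hypothetical console sketch (the builder call mirrors the spec changes later in this commit):

```ruby
# Hypothetical usage after the rename (illustration only).
project = Project.first
project.mattermost_integration          # was: project.mattermost_service
project.create_microsoft_teams_integration(
  webhook: 'https://example.com/hook'   # illustrative URL
)
```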

View File

@ -10,6 +10,10 @@ module Ci
resource_group scheduling_type].freeze
end
def self.extra_accessors
[]
end
def execute(build)
build.ensure_scheduling_type!
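The new `extra_accessors` hook is an extension point: subclasses can return additional attribute names to carry over when a build is retried, and the spec change at the end of this commit excludes them from the generic clone check. A hedged sketch of a hypothetical subclass:

```ruby
# Hypothetical subclass (not part of this diff): append extra attributes to
# copy onto the new build when retrying.
class RetryBuildServiceWithProfiles < Ci::RetryBuildService
  def self.extra_accessors
    %i[some_profile_association] # illustrative attribute name only
  end
end
```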

View File

@ -8,7 +8,7 @@
= form.label :only_allow_merge_if_pipeline_succeeds, class: 'form-check-label' do
= s_('ProjectSettings|Pipelines must succeed')
.text-secondary
- configuring_pipelines_for_merge_requests_help_link_url = help_page_path('ci/merge_request_pipelines/index.md', anchor: 'configuring-pipelines-for-merge-requests')
- configuring_pipelines_for_merge_requests_help_link_url = help_page_path('ci/merge_request_pipelines/index.md', anchor: 'configure-pipelines-for-merge-requests')
- configuring_pipelines_for_merge_requests_help_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: configuring_pipelines_for_merge_requests_help_link_url }
= s_('ProjectSettings|To enable this feature, configure pipelines. %{link_start}How to configure pipelines for merge requests?%{link_end}').html_safe % { link_start: configuring_pipelines_for_merge_requests_help_link_start, link_end: '</a>'.html_safe }
.form-check.mb-2

View File

@ -32,6 +32,8 @@ module Gitlab
require_dependency Rails.root.join('lib/gitlab/middleware/rack_multipart_tempfile_factory')
require_dependency Rails.root.join('lib/gitlab/runtime')
config.autoloader = :classic
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.

View File

@ -50,7 +50,7 @@ Rails.application.configure do
config.action_mailer.raise_delivery_errors = true
# Don't make a mess when bootstrapping a development environment
config.action_mailer.perform_deliveries = (ENV['BOOTSTRAP'] != '1')
config.action_mailer.preview_path = 'app/mailers/previews'
config.action_mailer.preview_path = "#{Rails.root}{/ee,}/app/mailers/previews"
config.eager_load = false

View File

@ -7,7 +7,7 @@ rescue Gitlab::Runtime::IdentificationError => e
\n!! RUNTIME IDENTIFICATION FAILED: #{e}
Runtime based configuration settings may not work properly.
If you continue to see this error, please file an issue via
https://gitlab.com/gitlab-org/gitlab/issues/new
https://gitlab.com/gitlab-org/gitlab/-/issues/new
NOTICE
Gitlab::AppLogger.error(message)
Gitlab::ErrorTracking.track_exception(e)

View File

@ -1,3 +1,3 @@
# frozen_string_literal: true
GlobalID.prepend(Gitlab::Patch::GlobalID)
GlobalID.prepend(Gitlab::Patch::GlobalId)

View File

@ -0,0 +1,71 @@
# frozen_string_literal: true
Rails.autoloaders.each do |autoloader|
# We need to ignore these since these are non-Ruby files
# that do not define Ruby classes / modules
autoloader.ignore(Rails.root.join('lib/support'))
# Ignore generators since these are loaded manually by Rails
# https://github.com/rails/rails/blob/v6.1.3.2/railties/lib/rails/command/behavior.rb#L56-L65
autoloader.ignore(Rails.root.join('lib/generators'))
autoloader.ignore(Rails.root.join('ee/lib/generators')) if Gitlab.ee?
# Mailer previews are also loaded manually by Rails
# https://github.com/rails/rails/blob/v6.1.3.2/actionmailer/lib/action_mailer/preview.rb#L121-L125
autoloader.ignore(Rails.root.join('app/mailers/previews'))
autoloader.ignore(Rails.root.join('ee/app/mailers/previews')) if Gitlab.ee?
autoloader.inflector.inflect(
'api' => 'API',
'api_authentication' => 'APIAuthentication',
'api_guard' => 'APIGuard',
'group_api_compatibility' => 'GroupAPICompatibility',
'project_api_compatibility' => 'ProjectAPICompatibility',
'ast' => 'AST',
'cte' => 'CTE',
'recursive_cte' => 'RecursiveCTE',
'cidr' => 'CIDR',
'cli' => 'CLI',
'dn' => 'DN',
'global_id_type' => 'GlobalIDType',
'global_id_compatibility' => 'GlobalIDCompatibility',
'hll' => 'HLL',
'hll_redis_counter' => 'HLLRedisCounter',
'redis_hll_metric' => 'RedisHLLMetric',
'hmac_token' => 'HMACToken',
'html' => 'HTML',
'html_parser' => 'HTMLParser',
'html_gitlab' => 'HTMLGitlab',
'http' => 'HTTP',
'http_connection_adapter' => 'HTTPConnectionAdapter',
'http_clone_enabled_check' => 'HTTPCloneEnabledCheck',
'hangouts_chat_http_override' => 'HangoutsChatHTTPOverride',
'chunked_io' => 'ChunkedIO',
'http_io' => 'HttpIO',
'json_formatter' => 'JSONFormatter',
'json_web_token' => 'JSONWebToken',
'as_json' => 'AsJSON',
'jwt_token' => 'JWTToken',
'ldap_key' => 'LDAPKey',
'mr_note' => 'MRNote',
'pdf' => 'PDF',
'rsa_token' => 'RSAToken',
'san_extension' => 'SANExtension',
'sca' => 'SCA',
'spdx' => 'SPDX',
'sql' => 'SQL',
'sse_helpers' => 'SSEHelpers',
'ssh_key' => 'SSHKey',
'ssh_key_with_user' => 'SSHKeyWithUser',
'ssh_public_key' => 'SSHPublicKey',
'git_ssh_proxy' => 'GitSSHProxy',
'git_user_default_ssh_config_check' => 'GitUserDefaultSSHConfigCheck',
'binary_stl' => 'BinarySTL',
'text_stl' => 'TextSTL',
'svg' => 'SVG',
'function_uri' => 'FunctionURI',
'uuid' => 'UUID',
'vulnerability_uuid' => 'VulnerabilityUUID',
'vs_code_extension_activity_unique_counter' => 'VSCodeExtensionActivityUniqueCounter'
)
end
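These inflections tell Zeitwerk which constant each acronym-heavy file name is expected to define; the default camelization would otherwise look for the wrong name. A small illustration based on entries visible above:

```ruby
# Illustration only: with the overrides above, a file named chunked_io.rb is
# expected to define ...::ChunkedIO, and hll_redis_counter.rb to define
# ...::HLLRedisCounter. Zeitwerk's default camelization would instead produce:
'chunked_io'.split('_').map(&:capitalize).join # => "ChunkedIo"
```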

View File

@ -6,7 +6,7 @@ class Gitlab::Seeder::Environments
end
def seed!
@project.create_mock_monitoring_service!(active: true)
@project.create_mock_monitoring_integration!(active: true)
create_master_deployments!('production')
create_master_deployments!('staging')

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class CreateDastSiteProfilesBuilds < ActiveRecord::Migration[6.1]
def up
table_comment = { owner: 'group::dynamic analysis', description: 'Join table between DAST Site Profiles and CI Builds' }
create_table :dast_site_profiles_builds, primary_key: [:dast_site_profile_id, :ci_build_id], comment: table_comment.to_json do |t|
t.bigint :dast_site_profile_id, null: false
t.bigint :ci_build_id, null: false
t.index :ci_build_id, unique: true, name: :dast_site_profiles_builds_on_ci_build_id
end
end
def down
drop_table :dast_site_profiles_builds
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddCiBuildIdFkToDastSiteProfilesBuilds < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :dast_site_profiles_builds, :ci_builds, column: :ci_build_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :dast_site_profiles_builds, column: :ci_build_id
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddDastSiteProfileIdFkToDastSiteProfilesBuilds < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :dast_site_profiles_builds, :dast_site_profiles, column: :dast_site_profile_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :dast_site_profiles_builds, column: :dast_site_profile_id
end
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class CreateDastScannerProfilesBuilds < ActiveRecord::Migration[6.1]
def up
table_comment = { owner: 'group::dynamic analysis', description: 'Join table between DAST Scanner Profiles and CI Builds' }
create_table :dast_scanner_profiles_builds, primary_key: [:dast_scanner_profile_id, :ci_build_id], comment: table_comment.to_json do |t|
t.bigint :dast_scanner_profile_id, null: false
t.bigint :ci_build_id, null: false
t.index :ci_build_id, unique: true, name: :dast_scanner_profiles_builds_on_ci_build_id
end
end
def down
drop_table :dast_scanner_profiles_builds
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddCiBuildIdFkToDastScannerProfilesBuilds < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :dast_scanner_profiles_builds, :ci_builds, column: :ci_build_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :dast_scanner_profiles_builds, column: :ci_build_id
end
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class AddDastScannerProfileIdFkToDastScannerProfilesBuilds < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_foreign_key :dast_scanner_profiles_builds, :dast_scanner_profiles, column: :dast_scanner_profile_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :dast_scanner_profiles_builds, column: :dast_scanner_profile_id
end
end
end

View File

@ -0,0 +1 @@
fa373e98739d57d829273cfa9246137e2c151be67e97183c1dcdb288150aaeb5

View File

@ -0,0 +1 @@
c7cf4aad7637d793d1ace8fee02111bc9b0d2eea09efadb0fd616bc5c5e5550c

View File

@ -0,0 +1 @@
da868be7c8edefc462110b5b36415870cc0c7c59dba1e3d514348011a9e70642

View File

@ -0,0 +1 @@
2d025932dca7a407968e14872ce053461e69550098ca089d4e6ece323d240927

View File

@ -0,0 +1 @@
7529373266b6c9b179367d5fa8775f5e2ad600008957b3a821d689aec70c7407

View File

@ -0,0 +1 @@
3818094a4470ff7d0c105c000655dac4205e8265f78df638df0e2ef3dc6deaf3

View File

@ -12047,6 +12047,13 @@ CREATE TABLE dast_scanner_profiles (
CONSTRAINT check_568568fabf CHECK ((char_length(name) <= 255))
);
CREATE TABLE dast_scanner_profiles_builds (
dast_scanner_profile_id bigint NOT NULL,
ci_build_id bigint NOT NULL
);
COMMENT ON TABLE dast_scanner_profiles_builds IS '{"owner":"group::dynamic analysis","description":"Join table between DAST Scanner Profiles and CI Builds"}';
CREATE SEQUENCE dast_scanner_profiles_id_seq
START WITH 1
INCREMENT BY 1
@ -12102,6 +12109,13 @@ CREATE TABLE dast_site_profiles (
CONSTRAINT check_f22f18002a CHECK ((char_length(auth_username) <= 255))
);
CREATE TABLE dast_site_profiles_builds (
dast_site_profile_id bigint NOT NULL,
ci_build_id bigint NOT NULL
);
COMMENT ON TABLE dast_site_profiles_builds IS '{"owner":"group::dynamic analysis","description":"Join table between DAST Site Profiles and CI Builds"}';
CREATE SEQUENCE dast_site_profiles_id_seq
START WITH 1
INCREMENT BY 1
@ -21092,12 +21106,18 @@ ALTER TABLE ONLY dast_profiles_pipelines
ALTER TABLE ONLY dast_profiles
ADD CONSTRAINT dast_profiles_pkey PRIMARY KEY (id);
ALTER TABLE ONLY dast_scanner_profiles_builds
ADD CONSTRAINT dast_scanner_profiles_builds_pkey PRIMARY KEY (dast_scanner_profile_id, ci_build_id);
ALTER TABLE ONLY dast_scanner_profiles
ADD CONSTRAINT dast_scanner_profiles_pkey PRIMARY KEY (id);
ALTER TABLE ONLY dast_site_profile_secret_variables
ADD CONSTRAINT dast_site_profile_secret_variables_pkey PRIMARY KEY (id);
ALTER TABLE ONLY dast_site_profiles_builds
ADD CONSTRAINT dast_site_profiles_builds_pkey PRIMARY KEY (dast_site_profile_id, ci_build_id);
ALTER TABLE ONLY dast_site_profiles_pipelines
ADD CONSTRAINT dast_site_profiles_pipelines_pkey PRIMARY KEY (dast_site_profile_id, ci_pipeline_id);
@ -22343,6 +22363,10 @@ CREATE INDEX commit_id_and_note_id_index ON commit_user_mentions USING btree (co
CREATE INDEX composer_cache_files_index_on_deleted_at ON packages_composer_cache_files USING btree (delete_at, id);
CREATE UNIQUE INDEX dast_scanner_profiles_builds_on_ci_build_id ON dast_scanner_profiles_builds USING btree (ci_build_id);
CREATE UNIQUE INDEX dast_site_profiles_builds_on_ci_build_id ON dast_site_profiles_builds USING btree (ci_build_id);
CREATE UNIQUE INDEX design_management_designs_versions_uniqueness ON design_management_designs_versions USING btree (design_id, version_id);
CREATE INDEX design_user_mentions_on_design_id_and_note_id_index ON design_user_mentions USING btree (design_id, note_id);
@ -25705,6 +25729,9 @@ ALTER TABLE ONLY vulnerability_feedback
ALTER TABLE ONLY deploy_keys_projects
ADD CONSTRAINT fk_58a901ca7e FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY dast_scanner_profiles_builds
ADD CONSTRAINT fk_5d46286ad3 FOREIGN KEY (dast_scanner_profile_id) REFERENCES dast_scanner_profiles(id) ON DELETE CASCADE;
ALTER TABLE ONLY issue_assignees
ADD CONSTRAINT fk_5e0c8d9154 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
@ -25864,6 +25891,9 @@ ALTER TABLE ONLY ci_pipeline_schedules
ALTER TABLE ONLY todos
ADD CONSTRAINT fk_91d1f47b13 FOREIGN KEY (note_id) REFERENCES notes(id) ON DELETE CASCADE;
ALTER TABLE ONLY dast_site_profiles_builds
ADD CONSTRAINT fk_94e80df60e FOREIGN KEY (dast_site_profile_id) REFERENCES dast_site_profiles(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_feedback
ADD CONSTRAINT fk_94f7c8a81e FOREIGN KEY (comment_author_id) REFERENCES users(id) ON DELETE SET NULL;
@ -25927,6 +25957,9 @@ ALTER TABLE ONLY ci_builds
ALTER TABLE ONLY ci_pipelines
ADD CONSTRAINT fk_a23be95014 FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY dast_site_profiles_builds
ADD CONSTRAINT fk_a325505e99 FOREIGN KEY (ci_build_id) REFERENCES ci_builds(id) ON DELETE CASCADE;
ALTER TABLE ONLY bulk_import_entities
ADD CONSTRAINT fk_a44ff95be5 FOREIGN KEY (parent_id) REFERENCES bulk_import_entities(id) ON DELETE CASCADE;
@ -26137,6 +26170,9 @@ ALTER TABLE ONLY gitlab_subscriptions
ALTER TABLE ONLY ci_triggers
ADD CONSTRAINT fk_e3e63f966e FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY dast_scanner_profiles_builds
ADD CONSTRAINT fk_e4c49200f8 FOREIGN KEY (ci_build_id) REFERENCES ci_builds(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_requests
ADD CONSTRAINT fk_e719a85f8a FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE SET NULL;

View File

@ -64,7 +64,7 @@ GET /groups/:id/epics?state=opened
| `author_id` | integer | no | Return epics created by the given user `id` |
| `labels` | string | no | Return epics matching a comma separated list of labels names. Label names from the epic group or a parent group can be used |
| `with_labels_details` | boolean | no | If `true`, response returns more details for each label in labels field: `:name`, `:color`, `:description`, `:description_html`, `:text_color`. Default is `false`. Available in [GitLab 12.7](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21413) and later |
| `order_by` | string | no | Return epics ordered by `created_at` or `updated_at` fields. Default is `created_at` |
| `order_by` | string | no | Return epics ordered by `created_at`, `updated_at`, or `title` fields. Default is `created_at` |
| `sort` | string | no | Return epics sorted in `asc` or `desc` order. Default is `desc` |
| `search` | string | no | Search epics against their `title` and `description` |
| `state` | string | no | Search epics against their `state`, possible filters: `opened`, `closed` and `all`, default: `all` |

View File

@ -14234,6 +14234,8 @@ Roadmap sort values.
| <a id="epicsortend_date_desc"></a>`END_DATE_DESC` | Sort by end date in descending order. |
| <a id="epicsortstart_date_asc"></a>`START_DATE_ASC` | Sort by start date in ascending order. |
| <a id="epicsortstart_date_desc"></a>`START_DATE_DESC` | Sort by start date in descending order. |
| <a id="epicsorttitle_asc"></a>`TITLE_ASC` | Sort by title in ascending order. |
| <a id="epicsorttitle_desc"></a>`TITLE_DESC` | Sort by title in descending order. |
| <a id="epicsortend_date_asc"></a>`end_date_asc` **{warning-solid}** | **Deprecated** in 13.11. Use END_DATE_ASC. |
| <a id="epicsortend_date_desc"></a>`end_date_desc` **{warning-solid}** | **Deprecated** in 13.11. Use END_DATE_DESC. |
| <a id="epicsortstart_date_asc"></a>`start_date_asc` **{warning-solid}** | **Deprecated** in 13.11. Use START_DATE_ASC. |

Binary file not shown (deleted image, previously 5.2 KiB)

View File

@ -6,17 +6,17 @@ type: reference, index
last_update: 2019-07-03
---
# Pipelines for Merge Requests
# Pipelines for merge requests
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/15310) in GitLab 11.6.
In a [basic configuration](../pipelines/pipeline_architectures.md#basic-pipelines), GitLab runs a pipeline each time
changes are pushed to a branch.
If you want the pipeline to run jobs **only** on commits to a branch that is associated with a merge request,
If you want the pipeline to run jobs **only** on commits associated with a merge request,
you can use *pipelines for merge requests*.
In the UI, these pipelines are labeled as `detached`. Otherwise, these pipelines appear the same
In the UI, these pipelines are labeled as `detached`. Otherwise, these pipelines are the same
as other pipelines.
Pipelines for merge requests can run when you:
@ -25,13 +25,8 @@ Pipelines for merge requests can run when you:
- Commit changes to the source branch for the merge request.
- Select the **Run pipeline** button from the **Pipelines** tab in the merge request.
Any user who has developer [permissions](../../user/permissions.md)
can run a pipeline for merge requests.
![Merge request page](img/merge_request.png)
If you use this feature with [merge when pipeline succeeds](../../user/project/merge_requests/merge_when_pipeline_succeeds.md),
pipelines for merge requests take precedence over the other regular pipelines.
pipelines for merge requests take precedence over other pipelines.
## Prerequisites
@ -39,29 +34,24 @@ To enable pipelines for merge requests:
- Your repository must be a GitLab repository, not an
[external repository](../ci_cd_for_external_repos/index.md).
- [In GitLab 11.10 and later](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/25504),
you must be using GitLab Runner 11.9.
- You must have the Developer [role](../../user/permissions.md)
to run a pipeline for merge requests.
## Configuring pipelines for merge requests
## Configure pipelines for merge requests
To configure pipelines for merge requests you need to configure your [CI/CD configuration file](../yaml/README.md).
There are a few different ways to do this:
To configure pipelines for merge requests, you must configure your [CI/CD configuration file](../yaml/README.md).
To do this, you can use [`rules`](#use-rules-to-run-pipelines-for-merge-requests) or [`only/except`](#use-only-or-except-to-run-pipelines-for-merge-requests).
### Use `rules` to run pipelines for merge requests
When using `rules`, which is the preferred method, we recommend starting with one
of the [`workflow:rules` templates](../yaml/README.md#workflowrules-templates) to ensure
your basic configuration is correct. Instructions on how to do this, as well as how
to customize, are available at that link.
GitLab recommends that you use the `rules` keyword, which is available in
[`workflow:rules` templates](../yaml/README.md#workflowrules-templates).
### Use `only` or `except` to run pipelines for merge requests
If you want to continue using `only/except`, this is possible but please review the drawbacks
below.
When you use this method, you have to specify `only: - merge_requests` for each job. In this
example, the pipeline contains a `test` job that is configured to run on merge requests.
You can use the `only/except` keywords. However, with this method, you must specify `only: - merge_requests` for each job.
In the following example, the pipeline contains a `test` job that is configured to run on merge requests.
The `build` and `deploy` jobs don't have the `only: - merge_requests` keyword,
so they don't run on merge requests.
@ -85,20 +75,18 @@ deploy:
- main
```
#### Excluding certain jobs
#### Exclude specific jobs
The behavior of the `only: [merge_requests]` keyword is such that _only_ jobs with
that keyword are run in the context of a merge request; no other jobs run.
When you use `only: [merge_requests]`, only jobs with
that keyword are run in the context of a merge request. No other jobs run.
However, you can invert this behavior and have all of your jobs run _except_
for one or two.
Consider the following pipeline, with jobs `A`, `B`, and `C`. Imagine you want:
However, you can invert this behavior and have all of your jobs run except
for one or two. For example, you might have a pipeline with jobs `A`, `B`, and `C`, and you want:
- All pipelines to always run `A` and `B`.
- `C` to run only for merge requests.
To achieve this, you can configure your `.gitlab-ci.yml` file as follows:
To achieve this outcome, configure your `.gitlab-ci.yml` file as follows:
```yaml
.only-default: &only-default
@ -124,23 +112,20 @@ C:
- merge_requests
```
Therefore:
- Since `A` and `B` are getting the `only:` rule to execute in all cases, they always run.
- Since `C` specifies that it should only run for merge requests, it doesn't run for any pipeline
- `A` and `B` always run, because they get the `only:` rule to execute in all cases.
- `C` only runs for merge requests. It doesn't run for any pipeline
except a merge request pipeline.
This helps you avoid having to add the `only:` rule to all of your jobs to make
them always run. You can use this format to set up a Review App, helping to
In this example, you don't have to add the `only:` rule to all of your jobs to make
them always run. You can use this format to set up a Review App, which helps to
save resources.
#### Excluding certain branches
#### Exclude specific branches
Pipelines for merge requests require special treatment when
using [`only`/`except`](../yaml/README.md#only--except). Unlike ordinary
branch refs (for example `refs/heads/my-feature-branch`), merge request refs
use a special Git reference that looks like `refs/merge-requests/:iid/head`. Because
of this, the following configuration will **not** work as expected:
Branch refs use this format: `refs/heads/my-feature-branch`.
Merge request refs use this format: `refs/merge-requests/:iid/head`.
Because of this difference, the following configuration does not work as expected:
```yaml
# Does not exclude a branch named "docs-my-fix"!
@ -149,7 +134,7 @@ test:
except: [/^docs-/]
```
Instead, you can use the
Instead, use the
[`$CI_COMMIT_REF_NAME` predefined environment
variable](../variables/predefined_variables.md) in
combination with
@ -164,55 +149,43 @@ test:
- $CI_COMMIT_REF_NAME =~ /^docs-/
```
## Pipelines for Merged Results **(PREMIUM)**
Read the [documentation on Pipelines for Merged Results](pipelines_for_merged_results/index.md).
### Merge Trains **(PREMIUM)**
Read the [documentation on Merge Trains](pipelines_for_merged_results/merge_trains/index.md).
## Run pipelines in the parent project for merge requests from a forked project **(PREMIUM)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/217451) in GitLab 13.3.
> - [Moved](https://about.gitlab.com/blog/2021/01/26/new-gitlab-product-subscription-model/) to GitLab Premium in 13.9.
By default, external contributors working from forks can't create pipelines in the
parent project. When a pipeline for merge requests is triggered by a merge request
coming from a fork:
By default, external contributors who work in forks can't create pipelines in the
parent project. When a merge request that comes from a fork triggers a pipeline:
- It's created and runs in the fork (source) project, not the parent (target) project.
- It uses the fork project's CI/CD configuration and resources.
- The pipeline is created and runs in the fork (source) project, not the parent (target) project.
- The pipeline uses the fork project's CI/CD configuration and resources.
If a pipeline runs in a fork, the **fork** icon appears for the pipeline in the merge request.
If a pipeline runs in a fork, a **fork** badge appears for the pipeline in the merge request.
![Pipeline ran in fork](img/pipeline-fork_v13_7.png)
Sometimes parent project members want the pipeline to run in the parent
project. This could be to ensure that the post-merge pipeline passes in the parent project.
project. They may want to ensure that the post-merge pipeline passes in the parent project.
For example, a fork project could try to use a corrupted runner that doesn't execute
test scripts properly, but reports a passed pipeline. Reviewers in the parent project
could mistakenly trust the merge request because it passed a faked pipeline.
Parent project members with at least [Developer permissions](../../user/permissions.md)
Parent project members with at least the [Developer role](../../user/permissions.md)
can create pipelines in the parent project for merge requests
from a forked project. In the merge request, go to the **Pipelines** and click
**Run pipeline** button.
from a forked project. In the merge request, go to the **Pipelines** tab and select
**Run pipeline**.
WARNING:
Fork merge requests could contain malicious code that tries to steal secrets in the
parent project when the pipeline runs, even before merge. Reviewers must carefully
Fork merge requests can contain malicious code that tries to steal secrets in the
parent project when the pipeline runs, even before merge. As a reviewer, you must carefully
check the changes in the merge request before triggering the pipeline. GitLab shows
a warning that must be accepted before the pipeline can be triggered.
a warning that you must accept before you can trigger the pipeline.
## Additional predefined variables
## Predefined variables available for pipelines for merge requests
By using pipelines for merge requests, GitLab exposes additional predefined variables to the pipeline jobs.
Those variables contain information of the associated merge request, so that it's useful
to integrate your job with [GitLab Merge Request API](../../api/merge_requests.md).
You can find the list of available variables in [the reference sheet](../variables/predefined_variables.md).
The variable names begin with the `CI_MERGE_REQUEST_` prefix.
When you use pipelines for merge requests, [additional predefined variables](../variables/predefined_variables.md#predefined-variables-for-merge-request-pipelines) are available to the CI/CD jobs.
These variables contain information from the associated merge request, so that you can
integrate your job with the [GitLab Merge Request API](../../api/merge_requests.md).
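For illustration only (not part of this change, and assuming the job runs inside a merge request pipeline where `CI_MERGE_REQUEST_*` variables are set), a script step could read one of those variables:

```ruby
# Illustration: read a predefined merge request variable inside a job script.
iid = ENV['CI_MERGE_REQUEST_IID'] # set only in pipelines for merge requests
puts "Running for merge request !#{iid}" if iid
```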
## Troubleshooting
@ -226,8 +199,8 @@ If you are seeing two pipelines when using `only/except`, please see the caveats
related to using `only/except` above (or, consider moving to `rules`).
In [GitLab 13.7](https://gitlab.com/gitlab-org/gitlab/-/issues/201845) and later,
you can add `workflow:rules` to [switch from branch pipelines to merge request pipelines](../yaml/README.md#switch-between-branch-pipelines-and-merge-request-pipelines)
after a merge request is open on the branch.
you can add `workflow:rules` to [switch from branch pipelines to merge request pipelines](../yaml/README.md#switch-between-branch-pipelines-and-merge-request-pipelines).
The pipeline switches to merge request pipelines after a merge request is open on the branch.
### Two pipelines created when pushing an invalid CI configuration file
@ -235,3 +208,8 @@ Pushing to a branch with an invalid CI configuration file can trigger
the creation of two types of failed pipelines. One pipeline is a failed merge request
pipeline, and the other is a failed branch pipeline, but both are caused by the same
invalid configuration.
## Related topics
- [Pipelines for merged results](pipelines_for_merged_results/index.md).
- [Merge trains](pipelines_for_merged_results/merge_trains/index.md).

View File

@ -6,7 +6,7 @@ type: reference
last_update: 2019-07-03
---
# Pipelines for Merged Results **(PREMIUM)**
# Pipelines for merged results **(PREMIUM)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/7380) in [GitLab Premium](https://about.gitlab.com/pricing/) 11.10.
@ -57,7 +57,7 @@ To enable pipelines for merge results:
To enable pipelines for merged results for your project:
1. [Configure your CI/CD configuration file](../index.md#configuring-pipelines-for-merge-requests)
1. [Configure your CI/CD configuration file](../index.md#configure-pipelines-for-merge-requests)
so that the pipeline or individual jobs run for merge requests.
1. Visit your project's **Settings > General** and expand **Merge requests**.
1. Check **Enable merged results pipelines**.

View File

@ -81,7 +81,7 @@ To enable merge trains:
To enable merge trains for your project:
1. If you are on a self-managed GitLab instance, ensure the [feature flag](#merge-trains-feature-flag) is set correctly.
1. [Configure your CI/CD configuration file](../../index.md#configuring-pipelines-for-merge-requests)
1. [Configure your CI/CD configuration file](../../index.md#configure-pipelines-for-merge-requests)
so that the pipeline or individual jobs run for merge requests.
1. Visit your project's **Settings > General** and expand **Merge requests**.
1. In the **Merge method** section, verify that **Merge commit** is selected.

View File

@ -326,7 +326,7 @@ makes your pipelines run for branches and tags.
Branch pipeline status is displayed in merge requests that use the branch
as a source. However, this pipeline type does not support any features offered by
[merge request pipelines](../merge_request_pipelines/), like
[pipelines for merge results](../merge_request_pipelines/#pipelines-for-merged-results)
[pipelines for merged results](../merge_request_pipelines/pipelines_for_merged_results/index.md)
or [merge trains](../merge_request_pipelines/pipelines_for_merged_results/merge_trains/).
This template intentionally avoids those features.

View File

@ -0,0 +1,50 @@
---
stage: none
group: unassigned
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
---
# Storybook
The Storybook for the `gitlab-org/gitlab` project is available on our [GitLab Pages site](https://gitlab-org.gitlab.io/gitlab/storybook).
## Storybook in local development
Storybook dependencies and configuration are located under the `storybook/` directory.
To build and launch Storybook locally, in the root directory of the `gitlab` project:
1. Install Storybook dependencies:
```shell
yarn storybook:install
```
1. Build the Storybook site:
```shell
yarn storybook:start
```
## Adding components to Storybook
Stories can be added for any Vue component in the `gitlab` repository.
To add a story:
1. Create a new `.stories.js` file in the same directory as the Vue component.
The file name should have the same prefix as the Vue component.
```txt
vue_shared/
├─ components/
│ ├─ todo_button.vue
│ ├─ todo_button.stories.js
```
1. Write the story as per the [official Storybook instructions](https://storybook.js.org/docs/vue/writing-stories/introduction).
Notes:
- Specify the `title` field of the story as the component's file path from the `javascripts/` directory. For example, if the component is located at `app/assets/javascripts/vue_shared/components/todo_button.vue`, specify the `title` as
`vue_shared/components/todo_button`. This ensures the Storybook navigation maps closely to our internal directory structure.

View File

@ -560,7 +560,7 @@ request, be sure to start the `dont-interrupt-me` job before pushing.
- `.yarn-cache`
- `.assets-compile-cache` (the key includes `${NODE_ENV}` so it's actually two different caches).
1. These cache definitions are composed of [multiple atomic caches](../ci/caching/index.md#use-multiple-caches).
1. Only 6 specific jobs, running in 2-hourly scheduled pipelines, are pushing (i.e. updating) to the caches:
1. Only the following jobs, running in 2-hourly scheduled pipelines, are pushing (i.e. updating) to the caches:
- `update-setup-test-env-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- `update-gitaly-binaries-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
- `update-static-analysis-cache`, defined in [`.gitlab/ci/rails.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/rails.gitlab-ci.yml).
@ -568,6 +568,7 @@ request, be sure to start the `dont-interrupt-me` job before pushing.
- `update-assets-compile-production-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-assets-compile-test-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
- `update-storybook-yarn-cache`, defined in [`.gitlab/ci/frontend.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/ci/frontend.gitlab-ci.yml).
1. These jobs can also be forced to run in merge requests whose title includes `UPDATE CACHE` (this can be useful to warm the caches in an MR that updates the cache keys).
### Artifacts strategy

View File

@ -42,7 +42,7 @@ This template requires:
- A project built in Rails that uses RSpec for testing.
- CI/CD configured to:
- Use a Docker image with Ruby available.
- Use [Pipelines for merge requests](../../../ci/merge_request_pipelines/index.md#configuring-pipelines-for-merge-requests)
- Use [Pipelines for merge requests](../../../ci/merge_request_pipelines/index.md#configure-pipelines-for-merge-requests)
- [Pipelines for Merged Results](../../../ci/merge_request_pipelines/pipelines_for_merged_results/index.md#enable-pipelines-for-merged-results)
enabled in the project settings.
- A Docker image with Ruby available. The template uses `image: ruby:2.6` by default, but you [can override](../../../ci/yaml/includes.md#overriding-external-template-values) this.

View File

@ -5,7 +5,7 @@ module API
class User < UserBasic
include UsersHelper
expose :created_at, if: ->(user, opts) { Ability.allowed?(opts[:current_user], :read_user_profile, user) }
expose :bio, :bio_html, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title
expose :bio, :bio_html, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :pronouns
expose :bot?, as: :bot
expose :work_information do |user|
work_information(user)

View File

@ -136,6 +136,7 @@ module Gitlab
def self.process_name
return 'sidekiq' if Gitlab::Runtime.sidekiq?
return 'action_cable' if Gitlab::Runtime.action_cable?
return 'console' if Gitlab::Runtime.console?
return 'test' if Rails.env.test?

View File

@ -4,7 +4,7 @@
# we alter GlobalID so it will correctly find the record with its new model name.
module Gitlab
module Patch
module GlobalID
module GlobalId
def initialize(gid, options = {})
super

View File

@ -35577,6 +35577,9 @@ msgstr ""
msgid "User was successfully updated."
msgstr ""
msgid "UserAvailability|%{author} %{spanStart}(Busy)%{spanEnd}"
msgstr ""
msgid "UserAvailability|%{author} (Busy)"
msgstr ""

View File

@ -40,6 +40,9 @@
"markdownlint:no-trailing-spaces": "markdownlint --config doc/.markdownlint/markdownlint-no-trailing-spaces.yml",
"markdownlint:no-trailing-spaces:fix": "yarn run markdownlint:no-trailing-spaces --fix",
"postinstall": "node ./scripts/frontend/postinstall.js",
"storybook:install": "yarn --cwd ./storybook install",
"storybook:build": "yarn --cwd ./storybook build",
"storybook:start": "yarn --cwd ./storybook start",
"stylelint-create-utility-map": "node scripts/frontend/stylelint/stylelint-utility-map.js",
"webpack": "NODE_OPTIONS=\"--max-old-space-size=3584\" webpack --config config/webpack.config.js",
"webpack-vendor": "NODE_OPTIONS=\"--max-old-space-size=3584\" webpack --config config/webpack.vendor.config.js",

View File

@ -44,10 +44,10 @@ RSpec.describe Projects::ServicesController do
let(:project) { create(:project) }
context 'with chat notification service' do
let(:service) { project.create_microsoft_teams_service(webhook: 'http://webhook.com') }
let(:service) { project.create_microsoft_teams_integration(webhook: 'http://webhook.com') }
it 'returns success' do
allow_any_instance_of(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
allow_next(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
put :test, params: project_params

View File

@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'User sees user popover', :js do
include Spec::Support::Helpers::Features::NotesHelpers
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, pronouns: 'they/them') }
let_it_be(:project) { create(:project, :repository, creator: user) }
let(:user) { project.creator }
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project)
end
@ -32,7 +32,7 @@ RSpec.describe 'User sees user popover', :js do
expect(page).to have_css(popover_selector, visible: true)
page.within(popover_selector) do
expect(page).to have_content(user.name)
expect(page).to have_content("#{user.name} (they/them)")
end
end

View File

@ -67,7 +67,14 @@ describe('Global Search Store Actions', () => {
it('calls Api.groupProjects', () => {
actions.fetchProjects({ commit: mockCommit, state });
expect(Api.groupProjects).toHaveBeenCalled();
expect(Api.groupProjects).toHaveBeenCalledWith(
state.query.group_id,
state.query.search,
{
order_by: 'similarity',
},
expect.any(Function),
);
expect(Api.projects).not.toHaveBeenCalled();
});
});

View File

@ -1,7 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { mount } from '@vue/test-utils';
import AssigneeAvatar from '~/sidebar/components/assignees/assignee_avatar.vue';
import CollapsedAssignee from '~/sidebar/components/assignees/collapsed_assignee.vue';
import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
import userDataMock from '../../user_data_mock';
const TEST_USER = userDataMock();
@ -17,11 +16,8 @@ describe('CollapsedAssignee assignee component', () => {
...props,
};
wrapper = shallowMount(CollapsedAssignee, {
wrapper = mount(CollapsedAssignee, {
propsData,
stubs: {
UserNameWithStatus,
},
});
}

View File

@ -1,25 +1,21 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { mount } from '@vue/test-utils';
import { AVAILABILITY_STATUS } from '~/set_status_modal/utils';
import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
const name = 'Goku';
const name = 'Administrator';
const containerClasses = 'gl-cool-class gl-over-9000';
describe('UserNameWithStatus', () => {
let wrapper;
function createComponent(props = {}) {
return shallowMount(UserNameWithStatus, {
wrapper = mount(UserNameWithStatus, {
propsData: { name, containerClasses, ...props },
stubs: {
GlSprintf,
},
});
}
beforeEach(() => {
wrapper = createComponent();
createComponent();
});
afterEach(() => {
@ -41,11 +37,39 @@ describe('UserNameWithStatus', () => {
describe(`with availability="${AVAILABILITY_STATUS.BUSY}"`, () => {
beforeEach(() => {
wrapper = createComponent({ availability: AVAILABILITY_STATUS.BUSY });
createComponent({ availability: AVAILABILITY_STATUS.BUSY });
});
it('will render "Busy"', () => {
expect(wrapper.html()).toContain('Goku (Busy)');
expect(wrapper.text()).toContain('(Busy)');
});
});
describe('when user has pronouns set', () => {
const pronouns = 'they/them';
beforeEach(() => {
createComponent({ pronouns });
});
it("renders user's name with pronouns", () => {
expect(wrapper.text()).toMatchInterpolatedText(`${name} (${pronouns})`);
});
});
describe('when user does not have pronouns set', () => {
describe.each`
pronouns
${undefined}
${null}
${''}
${' '}
`('when `pronouns` prop is $pronouns', ({ pronouns }) => {
it("renders only the user's name", () => {
createComponent({ pronouns });
expect(wrapper.text()).toMatchInterpolatedText(name);
});
});
});
});

View File

@ -1,5 +1,5 @@
import { GlSkeletonLoader, GlSprintf, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { GlSkeletonLoader, GlIcon } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { AVAILABILITY_STATUS } from '~/set_status_modal/utils';
import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
import UserPopover from '~/vue_shared/components/user_popover/user_popover.vue';
@ -13,6 +13,7 @@ const DEFAULT_PROPS = {
bio: null,
workInformation: null,
status: null,
pronouns: 'they/them',
loaded: true,
},
};
@ -30,23 +31,18 @@ describe('User Popover Component', () => {
wrapper.destroy();
});
const findByTestId = (testid) => wrapper.find(`[data-testid="${testid}"]`);
const findUserStatus = () => wrapper.find('.js-user-status');
const findTarget = () => document.querySelector('.js-user-link');
const findUserName = () => wrapper.find(UserNameWithStatus);
const findSecurityBotDocsLink = () => findByTestId('user-popover-bot-docs-link');
const findSecurityBotDocsLink = () => wrapper.findByTestId('user-popover-bot-docs-link');
const createWrapper = (props = {}, options = {}) => {
wrapper = shallowMount(UserPopover, {
wrapper = mountExtended(UserPopover, {
propsData: {
...DEFAULT_PROPS,
target: findTarget(),
...props,
},
stubs: {
GlSprintf,
UserNameWithStatus,
},
...options,
});
};
@ -232,6 +228,12 @@ describe('User Popover Component', () => {
expect(wrapper.text()).not.toContain('(Busy)');
});
it('passes `pronouns` prop to `UserNameWithStatus` component', () => {
createWrapper();
expect(findUserName().props('pronouns')).toBe('they/them');
});
});
describe('bot user', () => {

View File

@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'global_id' do
it 'prepends `Gitlab::Patch::GlobalID`' do
expect(GlobalID.ancestors).to include(Gitlab::Patch::GlobalID)
it 'prepends `Gitlab::Patch::GlobalId`' do
expect(GlobalID.ancestors).to include(Gitlab::Patch::GlobalId)
end
it 'patches GlobalID to find aliased models when a deprecation exists' do

View File

@ -9,7 +9,7 @@ RSpec.describe API::Entities::User do
subject { described_class.new(user, current_user: current_user).as_json }
it 'exposes correct attributes' do
expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information)
expect(subject).to include(:bio, :location, :public_email, :skype, :linkedin, :twitter, :website_url, :organization, :job_title, :work_information, :pronouns)
end
it 'exposes created_at if the current user can read the user profile' do

View File

@ -368,7 +368,7 @@ project:
- drone_ci_integration
- emails_on_push_integration
- pipelines_email_integration
- mattermost_slash_commands_service
- mattermost_slash_commands_integration
- slack_slash_commands_service
- irker_integration
- packagist_integration
@ -378,8 +378,8 @@ project:
- assembla_integration
- asana_integration
- slack_service
- microsoft_teams_service
- mattermost_service
- microsoft_teams_integration
- mattermost_integration
- hangouts_chat_integration
- unify_circuit_service
- buildkite_integration
@ -393,8 +393,8 @@ project:
- bugzilla_integration
- ewm_integration
- external_wiki_integration
- mock_ci_service
- mock_monitoring_service
- mock_ci_integration
- mock_monitoring_integration
- forked_to_members
- forked_from_project
- forks

View File

@ -5,27 +5,29 @@ require 'spec_helper'
RSpec.describe Integrations::MattermostSlashCommands do
it_behaves_like Integrations::BaseSlashCommands
context 'Mattermost API' do
describe 'Mattermost API' do
let(:project) { create(:project) }
let(:service) { project.build_mattermost_slash_commands_service }
let(:integration) { project.build_mattermost_slash_commands_integration }
let(:user) { create(:user) }
before do
session = ::Mattermost::Session.new(nil)
session.base_uri = 'http://mattermost.example.com'
allow_any_instance_of(::Mattermost::Client).to receive(:with_session)
.and_yield(session)
allow(session).to receive(:with_session).and_yield(session)
allow(::Mattermost::Session).to receive(:new).and_return(session)
end
describe '#configure' do
subject do
service.configure(user, team_id: 'abc',
trigger: 'gitlab', url: 'http://trigger.url',
icon_url: 'http://icon.url/icon.png')
integration.configure(user,
team_id: 'abc',
trigger: 'gitlab',
url: 'http://trigger.url',
icon_url: 'http://icon.url/icon.png')
end
context 'the requests succeeds' do
context 'when the request succeeds' do
before do
stub_request(:post, 'http://mattermost.example.com/api/v4/commands')
.with(body: {
@ -48,18 +50,18 @@ RSpec.describe Integrations::MattermostSlashCommands do
)
end
it 'saves the service' do
it 'saves the integration' do
expect { subject }.to change { project.integrations.count }.by(1)
end
it 'saves the token' do
subject
expect(service.reload.token).to eq('token')
expect(integration.reload.token).to eq('token')
end
end
context 'an error is received' do
context 'when an error is received' do
before do
stub_request(:post, 'http://mattermost.example.com/api/v4/commands')
.to_return(
@ -86,10 +88,10 @@ RSpec.describe Integrations::MattermostSlashCommands do
describe '#list_teams' do
subject do
service.list_teams(user)
integration.list_teams(user)
end
context 'the requests succeeds' do
context 'when the request succeeds' do
before do
stub_request(:get, 'http://mattermost.example.com/api/v4/users/me/teams')
.to_return(
@ -104,7 +106,7 @@ RSpec.describe Integrations::MattermostSlashCommands do
end
end
context 'an error is received' do
context 'when an error is received' do
before do
stub_request(:get, 'http://mattermost.example.com/api/v4/users/me/teams')
.to_return(

View File

@ -64,7 +64,8 @@ RSpec.describe Integrations::MicrosoftTeams do
end
it 'specifies the webhook when it is configured' do
expect(::MicrosoftTeams::Notifier).to receive(:new).with(webhook_url).and_return(double(:microsoft_teams_service).as_null_object)
integration = double(:microsoft_teams_integration).as_null_object
expect(::MicrosoftTeams::Notifier).to receive(:new).with(webhook_url).and_return(integration)
chat_service.execute(push_sample_data)
end

View File

@ -36,8 +36,8 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:protected_branches) }
it { is_expected.to have_many(:exported_protected_branches) }
it { is_expected.to have_one(:slack_service) }
it { is_expected.to have_one(:microsoft_teams_service) }
it { is_expected.to have_one(:mattermost_service) }
it { is_expected.to have_one(:microsoft_teams_integration) }
it { is_expected.to have_one(:mattermost_integration) }
it { is_expected.to have_one(:hangouts_chat_integration) }
it { is_expected.to have_one(:unify_circuit_service) }
it { is_expected.to have_one(:webex_teams_service) }
@ -56,7 +56,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:flowdock_integration) }
it { is_expected.to have_one(:assembla_integration) }
it { is_expected.to have_one(:slack_slash_commands_service) }
it { is_expected.to have_one(:mattermost_slash_commands_service) }
it { is_expected.to have_one(:mattermost_slash_commands_integration) }
it { is_expected.to have_one(:buildkite_integration) }
it { is_expected.to have_one(:bamboo_integration) }
it { is_expected.to have_one(:teamcity_service) }
@ -80,6 +80,8 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:error_tracking_setting).class_name('ErrorTracking::ProjectErrorTrackingSetting') }
it { is_expected.to have_one(:project_setting) }
it { is_expected.to have_one(:alerting_setting).class_name('Alerting::ProjectAlertingSetting') }
it { is_expected.to have_one(:mock_ci_integration) }
it { is_expected.to have_one(:mock_monitoring_integration) }
it { is_expected.to have_many(:commit_statuses) }
it { is_expected.to have_many(:ci_pipelines) }
it { is_expected.to have_many(:ci_refs) }

View File

@ -179,10 +179,10 @@ RSpec.describe API::Services do
end
describe 'POST /projects/:id/services/:slug/trigger' do
describe 'Mattermost Service' do
let(:service_name) { 'mattermost_slash_commands' }
describe 'Mattermost integration' do
let(:integration_name) { 'mattermost_slash_commands' }
context 'no service is available' do
context 'when no integration is available' do
it 'returns a not found message' do
post api("/projects/#{project.id}/services/idonotexist/trigger")
@ -191,34 +191,34 @@ RSpec.describe API::Services do
end
end
context 'the service exists' do
context 'when the integration exists' do
let(:params) { { token: 'token' } }
context 'the service is not active' do
context 'when the integration is not active' do
before do
project.create_mattermost_slash_commands_service(
project.create_mattermost_slash_commands_integration(
active: false,
properties: params
)
end
it 'when the service is inactive' do
post api("/projects/#{project.id}/services/#{service_name}/trigger"), params: params
it 'when the integration is inactive' do
post api("/projects/#{project.id}/services/#{integration_name}/trigger"), params: params
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'the service is active' do
context 'when the integration is active' do
before do
project.create_mattermost_slash_commands_service(
project.create_mattermost_slash_commands_integration(
active: true,
properties: params
)
end
it 'returns status 200' do
post api("/projects/#{project.id}/services/#{service_name}/trigger"), params: params
post api("/projects/#{project.id}/services/#{integration_name}/trigger"), params: params
expect(response).to have_gitlab_http_status(:ok)
end
@ -226,7 +226,7 @@ RSpec.describe API::Services do
context 'when the project can not be found' do
it 'returns a generic 404' do
post api("/projects/404/services/#{service_name}/trigger"), params: params
post api("/projects/404/services/#{integration_name}/trigger"), params: params
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response["message"]).to eq("404 Service Not Found")
@ -254,29 +254,29 @@ RSpec.describe API::Services do
end
end
describe 'Mattermost service' do
let(:service_name) { 'mattermost' }
describe 'Mattermost integration' do
let(:integration_name) { 'mattermost' }
let(:params) do
{ webhook: 'https://hook.example.com', username: 'username' }
end
before do
project.create_mattermost_service(
project.create_mattermost_integration(
active: true,
properties: params
)
end
it 'accepts a username for update' do
put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(username: 'new_username')
put api("/projects/#{project.id}/services/#{integration_name}", user), params: params.merge(username: 'new_username')
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['username']).to eq('new_username')
end
end
describe 'Microsoft Teams service' do
let(:service_name) { 'microsoft-teams' }
describe 'Microsoft Teams integration' do
let(:integration_name) { 'microsoft-teams' }
let(:params) do
{
webhook: 'https://hook.example.com',
@ -286,21 +286,23 @@ RSpec.describe API::Services do
end
before do
project.create_microsoft_teams_service(
project.create_microsoft_teams_integration(
active: true,
properties: params
)
end
it 'accepts branches_to_be_notified for update' do
put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(branches_to_be_notified: 'all')
put api("/projects/#{project.id}/services/#{integration_name}", user),
params: params.merge(branches_to_be_notified: 'all')
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['branches_to_be_notified']).to eq('all')
end
it 'accepts notify_only_broken_pipelines for update' do
put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(notify_only_broken_pipelines: true)
put api("/projects/#{project.id}/services/#{integration_name}", user),
params: params.merge(notify_only_broken_pipelines: true)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)

View File

@ -30,7 +30,7 @@ RSpec.describe Ci::RetryBuildService do
project.add_reporter(reporter)
end
clone_accessors = described_class.clone_accessors
clone_accessors = described_class.clone_accessors.without(described_class.extra_accessors)
reject_accessors =
%i[id status user token token_encrypted coverage trace runner
@ -98,7 +98,7 @@ RSpec.describe Ci::RetryBuildService do
end
clone_accessors.each do |attribute|
it "clones #{attribute} build attribute" do
it "clones #{attribute} build attribute", :aggregate_failures do
expect(attribute).not_to be_in(forbidden_associations), "association #{attribute} must be `belongs_to`"
expect(build.send(attribute)).not_to be_nil
expect(new_build.send(attribute)).not_to be_nil
@ -134,7 +134,7 @@ RSpec.describe Ci::RetryBuildService do
end
end
it 'has correct number of known attributes' do
it 'has correct number of known attributes', :aggregate_failures do
processed_accessors = clone_accessors + reject_accessors
known_accessors = processed_accessors + ignore_accessors
@ -146,9 +146,10 @@ RSpec.describe Ci::RetryBuildService do
Ci::Build.attribute_names.map(&:to_sym) +
Ci::Build.attribute_aliases.keys.map(&:to_sym) +
Ci::Build.reflect_on_all_associations.map(&:name) +
[:tag_list, :needs_attributes]
current_accessors << :secrets if Gitlab.ee?
[:tag_list, :needs_attributes] -
# ee-specific accessors should be tested in ee/spec/services/ci/retry_build_service_spec.rb instead
described_class.extra_accessors -
[:dast_site_profiles_build, :dast_scanner_profiles_build] # join tables
current_accessors.uniq!

View File

@ -66,14 +66,14 @@ RSpec.shared_examples Integrations::BaseSlashCommands do
}
end
let(:service) do
project.create_mattermost_slash_commands_service(
let(:integration) do
project.create_mattermost_slash_commands_integration(
properties: { token: 'token' }
)
end
it 'generates the url' do
response = service.trigger(params)
response = integration.trigger(params)
expect(response[:text]).to start_with(':wave: Hi there!')
end

2
storybook/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
node_modules/
public/

10
storybook/config/main.js Normal file
View File

@ -0,0 +1,10 @@
/* eslint-disable import/no-commonjs */
const IS_EE = require('../../config/helpers/is_ee_env');
module.exports = {
stories: [
'../../app/assets/javascripts/**/*.stories.js',
IS_EE && '../../ee/app/assets/javascripts/**/*.stories.js',
].filter(Boolean),
addons: ['@storybook/addon-essentials', '@storybook/addon-a11y'],
};
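For reference, a story file matched by these globs follows Storybook's Component Story Format. The sketch below is illustrative only; the component name and path are hypothetical and not part of this commit, and it assumes a Vue single-file component sitting next to the story:

/* app/assets/javascripts/vue_shared/components/example.stories.js (hypothetical) */
import ExampleComponent from './example.vue';

export default {
  component: ExampleComponent,
  title: 'vue_shared/components/example',
};

// A reusable template bound per story, the usual Storybook 6 / Vue 2 pattern.
const Template = (args, { argTypes }) => ({
  components: { ExampleComponent },
  props: Object.keys(argTypes),
  template: '<example-component v-bind="$props" />',
});

export const Default = Template.bind({});
Default.args = {};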

View File

@ -0,0 +1,7 @@
const stylesheetsRequireCtx = require.context(
'../../app/assets/stylesheets',
true,
/application\.scss$/,
);
stylesheetsRequireCtx('./application.scss');

View File

@ -0,0 +1,104 @@
/* eslint-disable no-param-reassign */
const { statSync } = require('fs');
const path = require('path');
const sass = require('node-sass'); // eslint-disable-line import/no-unresolved
const { buildIncludePaths, resolveGlobUrl } = require('node-sass-magic-importer/dist/toolbox'); // eslint-disable-line import/no-unresolved
const webpack = require('webpack');
const gitlabWebpackConfig = require('../../config/webpack.config.js');
const ROOT = path.resolve(__dirname, '../../');
const TRANSPARENT_1X1_PNG =
'url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==)';
const SASS_INCLUDE_PATHS = [
'app/assets/stylesheets',
'app/assets/stylesheets/_ee',
'ee/app/assets/stylesheets',
'ee/app/assets/stylesheets/_ee',
'node_modules',
].map((p) => path.resolve(ROOT, p));
/**
 * Custom importer for node-sass, invoked whenever LibSass encounters an `@import` directive.
 * Docs: https://github.com/sass/node-sass#importer--v200---experimental
 * @param {*} url the path exactly as it appears in the `@import` statement.
 * @param {*} prev the previously resolved path.
 * @returns {Object | null} an object with the resolved import contents, or null to fall back to LibSass' default resolution.
 */
function sassSmartImporter(url, prev) {
const nodeSassOptions = this.options;
const includePaths = buildIncludePaths(nodeSassOptions.includePaths, prev).filter(
(includePath) => !includePath.includes('node_modules'),
);
// GitLab extensively uses glob-style import paths, but
// Sass doesn't support glob-style URLs out of the box.
// Here, we try to resolve the glob URL.
// If it resolves, we replace the @import statement with one import per matched file.
const filePaths = resolveGlobUrl(url, includePaths);
if (filePaths) {
const contents = filePaths
.filter((file) => statSync(file).isFile())
.map((x) => `@import '${x}';`)
.join(`\n`);
return { contents };
}
return null;
}
const sassLoaderOptions = {
functions: {
'image-url($url)': function sassImageUrlStub() {
return new sass.types.String(TRANSPARENT_1X1_PNG);
},
'asset_path($url)': function sassAssetPathStub() {
return new sass.types.String(TRANSPARENT_1X1_PNG);
},
'asset_url($url)': function sassAssetUrlStub() {
return new sass.types.String(TRANSPARENT_1X1_PNG);
},
'url($url)': function sassUrlStub() {
return new sass.types.String(TRANSPARENT_1X1_PNG);
},
},
includePaths: SASS_INCLUDE_PATHS,
importer: sassSmartImporter,
};
module.exports = function storybookWebpackConfig({ config }) {
// Add any missing extensions from the main GitLab webpack config
config.resolve.extensions = Array.from(
new Set([...config.resolve.extensions, ...gitlabWebpackConfig.resolve.extensions]),
);
// Replace any Storybook-defined CSS loaders with our custom one.
config.module.rules = [
...config.module.rules.filter((r) => !r.test.test('.css')),
{
test: /\.s?css$/,
exclude: /typescale\/\w+_demo\.scss$/, // skip typescale demo stylesheets
loaders: [
'style-loader',
'css-loader',
{
loader: 'sass-loader',
options: sassLoaderOptions,
},
],
},
];
// Silence webpack warnings about moment/pikaday not being able to resolve.
config.plugins.push(new webpack.IgnorePlugin(/moment/, /pikaday/));
// Add any missing aliases from the main GitLab webpack config
Object.assign(config.resolve.alias, gitlabWebpackConfig.resolve.alias);
// The main GitLab project aliases this `icons.svg` file to app/assets/javascripts/lib/utils/icons_path.js,
// which depends on the existence of a global `gon` variable.
// By deleting the alias, imports of this path will resolve as expected.
delete config.resolve.alias['@gitlab/svgs/dist/icons.svg'];
return config;
};
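To illustrate how the stubbed Sass helpers behave, here is a minimal sketch; it is not part of this commit and assumes node-sass is installed, as declared in storybook/package.json. It compiles a tiny snippet with the same kind of `image-url` stub the config registers, so asset helpers resolve to a transparent 1x1 PNG instead of hitting the Rails asset pipeline:

/* Illustrative sketch only; not part of this commit. */
const sass = require('node-sass');

const TRANSPARENT_1X1_PNG =
  'url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==)';

const result = sass.renderSync({
  data: '.logo { background: image-url("logo.png"); }',
  functions: {
    // Same idea as the stubs above: every asset helper returns the 1x1 PNG data URL.
    'image-url($url)': () => new sass.types.String(TRANSPARENT_1X1_PNG),
  },
});

console.log(result.css.toString());
// .logo { background: url(data:image/png;base64,iVBOR...); }

Under the same loader options, sassSmartImporter expands a glob-style statement such as `@import 'framework/**/*';` into one `@import` per matching file, which is what allows GitLab's existing glob-heavy stylesheets to compile without modification.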

20
storybook/package.json Normal file
View File

@ -0,0 +1,20 @@
{
"private": true,
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "start-storybook -p 9002 -c config",
"build": "build-storybook -c config -o public"
},
"dependencies": {},
"devDependencies": {
"@storybook/addon-a11y": "^6.2.9",
"@storybook/addon-actions": "^6.2.9",
"@storybook/addon-controls": "^6.2.9",
"@storybook/addon-essentials": "^6.2.9",
"@storybook/vue": "6.2.9",
"node-sass": "^4.14.1",
"node-sass-magic-importer": "^5.3.2",
"postcss-loader": "3.0.0",
"sass-loader": "^7.1.0"
}
}
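With this manifest in place, running `yarn install` followed by `yarn start` from the storybook/ directory serves the Storybook UI on port 9002, while `yarn build` writes the static site to storybook/public/, which is why both node_modules/ and public/ are listed in storybook/.gitignore above.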

10766
storybook/yarn.lock Normal file

File diff suppressed because it is too large