Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-03-21 06:11:36 +00:00
parent bada454e15
commit fa44915648
36 changed files with 945 additions and 159 deletions

View File

@ -167,7 +167,6 @@ gem 'rack-cors', '~> 2.0.1', require: 'rack/cors', feature_category: :shared
gem 'graphql', '2.4.11', path: 'vendor/gems/graphql', feature_category: :api
gem 'graphql-docs', '~> 5.0.0', group: [:development, :test], feature_category: :api
gem 'apollo_upload_server', '~> 2.1.6', feature_category: :api
gem 'graphlient', '~> 0.8.0', feature_category: :importers # Used by BulkImport feature (group::import)
# Cells
gem 'gitlab-topology-service-client', '~> 0.1',

View File

@ -292,7 +292,6 @@
{"name":"grape-swagger","version":"2.1.2","platform":"ruby","checksum":"8ad7bd53c8baee704575808875dba8c08d269c457db3cf8f1b8a2a1dbf827294"},
{"name":"grape-swagger-entity","version":"0.5.5","platform":"ruby","checksum":"a2a0eb28964b1a56775a3571358a9f0a300b703dbaee1ee535adb2a7bed7ece6"},
{"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
{"name":"graphlient","version":"0.8.0","platform":"ruby","checksum":"98c408da1d083454e9f5e274f3b0b6261e2a0c2b5f2ed7b3ef9441d46f8e7cb1"},
{"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
{"name":"graphql-client","version":"0.23.0","platform":"ruby","checksum":"f238b8e451676baad06bd15f95396e018192243dcf12c4e6d13fb41d9a2babc1"},
{"name":"graphql-docs","version":"5.0.0","platform":"ruby","checksum":"76baca6e5a803a4b6a9fbbbfdbf16742b7c4c546c8592b6e1a7aa4e79e562d04"},

View File

@ -953,9 +953,6 @@ GEM
grape_logging (1.8.4)
grape
rack
graphlient (0.8.0)
faraday (~> 2.0)
graphql-client
graphlyte (1.0.0)
graphql-client (0.23.0)
activesupport (>= 3.0)
@ -2161,7 +2158,6 @@ DEPENDENCIES
grape-swagger (~> 2.1.2)
grape-swagger-entity (~> 0.5.5)
grape_logging (~> 1.8, >= 1.8.4)
graphlient (~> 0.8.0)
graphlyte (~> 1.0.0)
graphql (= 2.4.11)!
graphql-docs (~> 5.0.0)

View File

@ -292,7 +292,6 @@
{"name":"grape-swagger","version":"2.1.2","platform":"ruby","checksum":"8ad7bd53c8baee704575808875dba8c08d269c457db3cf8f1b8a2a1dbf827294"},
{"name":"grape-swagger-entity","version":"0.5.5","platform":"ruby","checksum":"a2a0eb28964b1a56775a3571358a9f0a300b703dbaee1ee535adb2a7bed7ece6"},
{"name":"grape_logging","version":"1.8.4","platform":"ruby","checksum":"efcc3e322dbd5d620a68f078733b7db043cf12680144cd03c982f14115c792d1"},
{"name":"graphlient","version":"0.8.0","platform":"ruby","checksum":"98c408da1d083454e9f5e274f3b0b6261e2a0c2b5f2ed7b3ef9441d46f8e7cb1"},
{"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
{"name":"graphql-client","version":"0.23.0","platform":"ruby","checksum":"f238b8e451676baad06bd15f95396e018192243dcf12c4e6d13fb41d9a2babc1"},
{"name":"graphql-docs","version":"5.0.0","platform":"ruby","checksum":"76baca6e5a803a4b6a9fbbbfdbf16742b7c4c546c8592b6e1a7aa4e79e562d04"},

View File

@ -965,9 +965,6 @@ GEM
grape_logging (1.8.4)
grape
rack
graphlient (0.8.0)
faraday (~> 2.0)
graphql-client
graphlyte (1.0.0)
graphql-client (0.23.0)
activesupport (>= 3.0)
@ -2195,7 +2192,6 @@ DEPENDENCIES
grape-swagger (~> 2.1.2)
grape-swagger-entity (~> 0.5.5)
grape_logging (~> 1.8, >= 1.8.4)
graphlient (~> 0.8.0)
graphlyte (~> 1.0.0)
graphql (= 2.4.11)!
graphql-docs (~> 5.0.0)

View File

@ -7,8 +7,11 @@ import {
TOKEN_TYPE_STATUS,
TOKEN_TITLE_JOBS_RUNNER_TYPE,
TOKEN_TYPE_JOBS_RUNNER_TYPE,
TOKEN_TITLE_JOBS_SOURCE,
TOKEN_TYPE_JOBS_SOURCE,
} from '~/vue_shared/components/filtered_search_bar/constants';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import JobSourceToken from './tokens/job_source_token.vue';
import JobStatusToken from './tokens/job_status_token.vue';
import JobRunnerTypeToken from './tokens/job_runner_type_token.vue';
@ -37,6 +40,17 @@ export default {
},
];
if (this.glFeatures.populateAndUseBuildSourceTable) {
tokens.push({
type: TOKEN_TYPE_JOBS_SOURCE,
title: TOKEN_TITLE_JOBS_SOURCE,
icon: 'trigger-source',
unique: true,
token: JobSourceToken,
operators: OPERATORS_IS,
});
}
if (this.glFeatures.adminJobsFilterRunnerType) {
tokens.push({
type: TOKEN_TYPE_JOBS_RUNNER_TYPE,
@ -61,6 +75,18 @@ export default {
value: { data: queryStringValue, operator: OPERATOR_IS },
},
];
case 'sources':
if (!this.glFeatures.populateAndUseBuildSourceTable) {
return acc;
}
return [
...acc,
{
type: TOKEN_TYPE_JOBS_SOURCE,
value: { data: queryStringValue, operator: OPERATOR_IS },
},
];
case 'runnerTypes':
if (!this.glFeatures.adminJobsFilterRunnerType) {
return acc;

View File

@ -0,0 +1,52 @@
import { s__ } from '~/locale';
// CE list of job-source options for the "Source" filtered-search token on
// the jobs list. Each entry pairs a translated, user-facing label (`text`)
// with the raw enum value (`value`) that is sent to the jobs GraphQL query
// as a `CiJobSource` — NOTE(review): confirm against the GraphQL schema
// when adding entries.
// Keep each s__() call as a literal string so gettext extraction finds it;
// do not build these entries programmatically.
export const JOB_SOURCES = [
{
text: s__('JobSource|Push'),
value: 'PUSH',
},
{
text: s__('JobSource|Web'),
value: 'WEB',
},
{
text: s__('JobSource|Trigger'),
value: 'TRIGGER',
},
{
text: s__('JobSource|Schedule'),
value: 'SCHEDULE',
},
{
text: s__('JobSource|API'),
value: 'API',
},
{
text: s__('JobSource|External'),
value: 'EXTERNAL',
},
{
text: s__('JobSource|Pipeline'),
value: 'PIPELINE',
},
{
text: s__('JobSource|Chat'),
value: 'CHAT',
},
{
text: s__('JobSource|Web IDE'),
value: 'WEBIDE',
},
{
text: s__('JobSource|Merge Request'),
value: 'MERGE_REQUEST_EVENT',
},
{
text: s__('JobSource|External Pull Request'),
value: 'EXTERNAL_PULL_REQUEST_EVENT',
},
{
text: s__('JobSource|Parent Pipeline'),
value: 'PARENT_PIPELINE',
},
];

View File

@ -0,0 +1,47 @@
<script>
import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
import { JOB_SOURCES } from 'ee_else_ce/ci/common/private/jobs_filtered_search/tokens/constants';
// Filtered-search token for filtering CI jobs by their source (push,
// schedule, trigger, ...). Thin wrapper around GlFilteredSearchToken that
// supplies the JOB_SOURCES list as suggestions.
export default {
// Exposed on $options so the template can iterate the source list.
JOB_SOURCES,
components: {
GlFilteredSearchToken,
GlFilteredSearchSuggestion,
},
props: {
// Token configuration object supplied by the filtered search bar.
config: {
type: Object,
required: true,
},
// Current token value; `value.data` holds the selected source enum value.
value: {
type: Object,
required: true,
},
},
computed: {
// JOB_SOURCES entry matching the selected value. Falls back to {} for
// no/unknown selection so `activeSource.text` renders as blank instead
// of throwing.
activeSource() {
return JOB_SOURCES.find((source) => source.value === this.value.data) || {};
},
},
};
</script>
<template>
<!-- Forward all props, attrs, and listeners straight through to the GitLab UI token. -->
<gl-filtered-search-token v-bind="{ ...$props, ...$attrs }" v-on="$listeners">
<!-- Collapsed view: human-readable label of the chosen source. -->
<template #view>
<div class="gl-flex gl-items-center">
<span data-testid="job-source-text">{{ activeSource.text }}</span>
</div>
</template>
<!-- Dropdown suggestions: one entry per known job source. -->
<template #suggestions>
<gl-filtered-search-suggestion
v-for="source in $options.JOB_SOURCES"
:key="source.value"
:value="source.value"
>
{{ source.text }}
</gl-filtered-search-suggestion>
</template>
</gl-filtered-search-token>
</template>

View File

@ -5,6 +5,7 @@ query getJobs(
$after: String
$before: String
$statuses: [CiJobStatus!]
$sources: [CiJobSource!]
$name: String
) {
project(fullPath: $fullPath) {
@ -15,6 +16,7 @@ query getJobs(
after: $after
before: $before
statuses: $statuses
sources: $sources
name: $name
) {
pageInfo {

View File

@ -1,7 +1,12 @@
query getJobsCount($fullPath: ID!, $statuses: [CiJobStatus!], $name: String) {
query getJobsCount(
$fullPath: ID!
$statuses: [CiJobStatus!]
$sources: [CiJobSource!]
$name: String
) {
project(fullPath: $fullPath) {
id
jobs(statuses: $statuses, name: $name) {
jobs(statuses: $statuses, sources: $sources, name: $name) {
count
}
}

View File

@ -133,8 +133,10 @@ export default {
resetRequestData() {
if (this.glFeatures.feSearchBuildByName) {
this.requestData = { statuses: null, name: null };
} else if (this.glFeatures.populateAndUseBuildSourceTable) {
this.requestData = { statuses: null, sources: null };
} else {
this.requestData = { statuses: null };
this.requestData = { statuses: null, sources: null };
}
},
resetPagination() {
@ -182,6 +184,11 @@ export default {
if (filter.type === 'status') {
this.requestData.statuses = filter.value.data;
}
// Technically, this shouldn't need to check the feature flag because the filter won't be available if the flag is disabled
if (this.glFeatures.populateAndUseBuildSourceTable && filter.type === 'jobs-source') {
this.requestData.sources = filter.value.data;
}
});
this.$apollo.queries.jobs.refetch({

View File

@ -81,6 +81,7 @@ export const TOKEN_TITLE_REVIEWER = s__('SearchToken|Reviewer');
export const TOKEN_TITLE_SOURCE_BRANCH = __('Source Branch');
export const TOKEN_TITLE_STATUS = __('Status');
export const TOKEN_TITLE_JOBS_RUNNER_TYPE = s__('Job|Runner type');
export const TOKEN_TITLE_JOBS_SOURCE = s__('Job|Source');
export const TOKEN_TITLE_TARGET_BRANCH = __('Target Branch');
export const TOKEN_TITLE_TYPE = __('Type');
export const TOKEN_TITLE_VERSION = __('Version');
@ -122,6 +123,7 @@ export const TOKEN_TYPE_REVIEWER = 'reviewer';
export const TOKEN_TYPE_SOURCE_BRANCH = 'source-branch';
export const TOKEN_TYPE_STATUS = 'status';
export const TOKEN_TYPE_JOBS_RUNNER_TYPE = 'jobs-runner-type';
export const TOKEN_TYPE_JOBS_SOURCE = 'jobs-source';
export const TOKEN_TYPE_TARGET_BRANCH = 'target-branch';
export const TOKEN_TYPE_TYPE = 'type';
export const TOKEN_TYPE_VERSION = 'version';

View File

@ -24,6 +24,7 @@ class Projects::JobsController < Projects::ApplicationController
before_action :verify_proxy_request!, only: :proxy_websocket_authorize
before_action :reject_if_build_artifacts_size_refreshing!, only: [:erase]
before_action :push_filter_by_name, only: [:index]
before_action :push_populate_and_use_build_source_table, only: [:index]
before_action :push_force_cancel_build, only: [:cancel, :show]
layout 'project'
@ -301,6 +302,10 @@ class Projects::JobsController < Projects::ApplicationController
push_frontend_feature_flag(:fe_search_build_by_name, @project)
end
def push_populate_and_use_build_source_table
push_frontend_feature_flag(:populate_and_use_build_source_table, @project)
end
def push_force_cancel_build
push_frontend_feature_flag(:force_cancel_build, current_user)
end

View File

@ -5,5 +5,5 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/164210
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/480824
milestone: '17.4'
group: group::custom models
type: experiment
default_enabled: false
type: beta
default_enabled: true

View File

@ -8,15 +8,6 @@ description: TODO
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/046b28312704f3131e72dcd2dbdacc5264d4aa62
milestone: '8.0'
gitlab_schema: gitlab_ci
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: trigger_id
table: ci_triggers
sharding_key: project_id
belongs_to: trigger
foreign_key_name: fk_b8ec8b7245
desired_sharding_key_migration_job_name: BackfillCiTriggerRequestsProjectId
table_size: small
sharding_key:
project_id: projects

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
# Adds a NOT NULL constraint on ci_trigger_requests.project_id.
# NOTE(review): assumes the project_id backfill
# (BackfillCiTriggerRequestsProjectId) has completed — confirm before this
# runs, otherwise existing NULL rows will fail validation.
class AddCiTriggerRequestsProjectIdNotNull < Gitlab::Database::Migration[2.2]
milestone '17.11'
# The not-null helper manages its own locking/validation steps, so it must
# run outside a single DDL transaction.
disable_ddl_transaction!
def up
add_not_null_constraint :ci_trigger_requests, :project_id
end
def down
remove_not_null_constraint :ci_trigger_requests, :project_id
end
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
# Drops the partitioned FK fk_a2141b1522_p from p_ci_builds
# (auto_canceled_by_partition_id, auto_canceled_by_id) to
# p_ci_pipelines (partition_id, id). The down method restores it.
class RemoveBrokenFkForPCiBuildsAndPCiPipelines < Gitlab::Database::Migration[2.2]
include Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
include Gitlab::Database::MigrationHelpers::WraparoundAutovacuum
milestone '17.11'
disable_ddl_transaction!
SOURCE_TABLE_NAME = :p_ci_builds
TARGET_TABLE_NAME = :p_ci_pipelines
COLUMN = :auto_canceled_by_id
TARGET_COLUMN = :id
PARTITION_COLUMN = :auto_canceled_by_partition_id
PARTITION_TARGET_COLUMN = :partition_id
FK_NAME = :fk_a2141b1522_p
def up
# NOTE: autovacuum runs on the partitions; ideally we should also check
# the gitlab_partitions_dynamic partitions, but these two tables are the
# ones that take a long time for now. Skips (returns early) while a
# wraparound-prevention vacuum is running on them.
return unless can_execute_on?(:ci_pipelines, :ci_builds)
with_lock_retries do
remove_foreign_key_if_exists(
SOURCE_TABLE_NAME,
TARGET_TABLE_NAME,
name: FK_NAME,
reverse_lock_order: true
)
end
end
def down
# Same wraparound-vacuum guard as #up — see note there.
return unless can_execute_on?(:ci_pipelines, :ci_builds)
# Re-create the composite (partition_id, id) FK; ON DELETE SET NULL
# clears auto_canceled_by when the cancelling pipeline is removed.
add_concurrent_partitioned_foreign_key(
SOURCE_TABLE_NAME,
TARGET_TABLE_NAME,
column: [PARTITION_COLUMN, COLUMN],
target_column: [PARTITION_TARGET_COLUMN, TARGET_COLUMN],
reverse_lock_order: true,
on_update: :cascade,
on_delete: :nullify,
name: FK_NAME,
validate: true
)
end
end

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
# Adds (and validates) the partitioned FK fk_rails_5d4d96d44b_p on
# p_ci_stages (partition_id, pipeline_id) referencing
# p_ci_pipelines (partition_id, id), cascading on update/delete.
class SyncFkForPCiStagesPipelineIdReferencingPCiPipelines < Gitlab::Database::Migration[2.2]
include Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
milestone '17.11'
disable_ddl_transaction!
SOURCE_TABLE_NAME = :p_ci_stages
TARGET_TABLE_NAME = :p_ci_pipelines
COLUMN = :pipeline_id
TARGET_COLUMN = :id
PARTITION_COLUMN = :partition_id
FK_NAME = :fk_rails_5d4d96d44b_p
def up
# validate: true — the helper validates the constraint after creating it.
add_concurrent_partitioned_foreign_key(
SOURCE_TABLE_NAME,
TARGET_TABLE_NAME,
column: [PARTITION_COLUMN, COLUMN],
target_column: [PARTITION_COLUMN, TARGET_COLUMN],
reverse_lock_order: true,
on_update: :cascade,
on_delete: :cascade,
name: FK_NAME,
validate: true
)
end
def down
# Roll back to the pre-migration state: drop the validated FK and
# re-add it as NOT VALID (validate: false), matching the prior schema.
with_lock_retries do
remove_foreign_key_if_exists(
SOURCE_TABLE_NAME,
TARGET_TABLE_NAME,
name: FK_NAME,
reverse_lock_order: true
)
end
add_concurrent_partitioned_foreign_key(
SOURCE_TABLE_NAME,
TARGET_TABLE_NAME,
column: [PARTITION_COLUMN, COLUMN],
target_column: [PARTITION_COLUMN, TARGET_COLUMN],
reverse_lock_order: true,
on_update: :cascade,
on_delete: :cascade,
name: FK_NAME,
validate: false
)
end
end

View File

@ -0,0 +1 @@
d51da0635b6af408c8c51eed00063fd8aa8bb276c1ee69fca15a3634e10abea9

View File

@ -0,0 +1 @@
04ae95e08461c43276d765786d862426e59c0af7c1866daed36d440cf1bdb06f

View File

@ -0,0 +1 @@
4a490d1f0e5335255f53ab48b1efcdab327fdd070fceb4f74baf3de5a3a11d1f

View File

@ -11764,7 +11764,8 @@ CREATE TABLE ci_trigger_requests (
created_at timestamp without time zone,
updated_at timestamp without time zone,
commit_id bigint,
project_id bigint
project_id bigint,
CONSTRAINT check_66cc8518b2 CHECK ((project_id IS NOT NULL))
);
CREATE SEQUENCE ci_trigger_requests_id_seq
@ -40922,9 +40923,6 @@ ALTER TABLE ONLY ml_candidates
ALTER TABLE ONLY subscription_add_on_purchases
ADD CONSTRAINT fk_a1db288990 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE p_ci_builds
ADD CONSTRAINT fk_a2141b1522_p FOREIGN KEY (auto_canceled_by_partition_id, auto_canceled_by_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE SET NULL;
ALTER TABLE ONLY protected_environment_approval_rules
ADD CONSTRAINT fk_a3cc825836 FOREIGN KEY (protected_environment_project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -42458,8 +42456,8 @@ ALTER TABLE ONLY ci_running_builds
ALTER TABLE ONLY merge_request_approval_metrics
ADD CONSTRAINT fk_rails_5cb1ca73f8 FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_stages
ADD CONSTRAINT fk_rails_5d4d96d44b_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE p_ci_stages
ADD CONSTRAINT fk_rails_5d4d96d44b_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY targeted_message_namespaces
ADD CONSTRAINT fk_rails_5d78dba870 FOREIGN KEY (targeted_message_id) REFERENCES targeted_messages(id) ON DELETE CASCADE;

View File

@ -78,6 +78,7 @@ module Gitlab
validate_job_needs!(name, job)
validate_dynamic_child_pipeline_dependencies!(name, job)
validate_job_environment!(name, job)
validate_job_pages_publish!(name, job)
end
def validate_job_stage!(name, job)
@ -191,6 +192,13 @@ module Gitlab
end
end
def validate_job_pages_publish!(name, job)
return unless job[:pages].is_a?(Hash)
return unless job.key?(:publish) && job[:pages].key?(:publish)
error!("#{name} job: use either #{name}:publish or #{name}:pages:publish")
end
def check_circular_dependencies
jobs = @jobs.values.to_h do |job|
name = job[:name].to_s

View File

@ -8,20 +8,17 @@ module Gitlab
# prevention can take many hours to complete on some of the tables and this in
# turn blocks the post deployment migrations pipeline.
# Intended workflow for this helper:
# 1. Introduce a migration that is guarded with this helper
# 2. Check that the migration was successfully executed on .com
# 3. Introduce the migration again for self-managed.
#
# 1. Introduce a migration that is guarded with this helper for self-managed
# so that the tests and everything else depending on it can reflect the changes
# 2. Introduce the migration again for .com
# so that we can keep trying until it succeeds on .com
def can_execute_on?(*tables)
return false unless Gitlab.com_except_jh? || Gitlab.dev_or_test_env?
return true unless Gitlab.com_except_jh?
return true unless wraparound_prevention_on_tables?(tables)
if wraparound_prevention_on_tables?(tables)
Gitlab::AppLogger.info(message: "Wraparound prevention vacuum detected", class: self.class)
say "Wraparound prevention vacuum detected, skipping migration"
return false
end
true
Gitlab::AppLogger.info(message: "Wraparound prevention vacuum detected", class: self.class)
say "Wraparound prevention vacuum detected, skipping migration"
false
end
def wraparound_prevention_on_tables?(tables)

View File

@ -33340,6 +33340,51 @@ msgstr ""
msgid "JobAssistant|week(s)"
msgstr ""
msgid "JobSource|API"
msgstr ""
msgid "JobSource|Chat"
msgstr ""
msgid "JobSource|External"
msgstr ""
msgid "JobSource|External Pull Request"
msgstr ""
msgid "JobSource|Merge Request"
msgstr ""
msgid "JobSource|On-Demand DAST Scan"
msgstr ""
msgid "JobSource|On-Demand DAST Validation"
msgstr ""
msgid "JobSource|Parent Pipeline"
msgstr ""
msgid "JobSource|Pipeline"
msgstr ""
msgid "JobSource|Push"
msgstr ""
msgid "JobSource|Schedule"
msgstr ""
msgid "JobSource|Security Policy"
msgstr ""
msgid "JobSource|Trigger"
msgstr ""
msgid "JobSource|Web"
msgstr ""
msgid "JobSource|Web IDE"
msgstr ""
msgid "JobToken|Apply permissions that grant access to individual resources."
msgstr ""
@ -33598,6 +33643,9 @@ msgstr ""
msgid "Job|Something went wrong while loading the log."
msgstr ""
msgid "Job|Source"
msgstr ""
msgid "Job|Status"
msgstr ""

View File

@ -4,25 +4,227 @@
# This involves installing an Ingress Controller(Ingress Nginx) and installing GitLab Workspaces Proxy.
#
# It uses the following environment variables
# $INGRESS_NGINX_HELM_CHART_VERSION - Ingress Nginx Helm Chart version.
# $GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION - GitLab Workspaces Proxy Helm Chart version.
# $GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAME - GitLab Workspaces Proxy helm release name
# $GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE - GitLab Workspaces Proxy helm release namespace
# $GITLAB_WORKSPACES_PROXY_DOMAIN - GitLab Workspaces Proxy domain
# $GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN - GitLab Workspaces Proxy wildcard domain where workspaces will be on
# $GITLAB_WORKSPACES_PROXY_REDIRECT_URI - GitLab Workspaces Proxy redirect uri for OAuth application
# $GITLAB_WORKSPACES_PROXY_SIGNING_KEY - GitLab Workspaces Proxy signing key
# $GITLAB_WORKSPACES_PROXY_SSH_HOST_KEY - GitLab Workspaces Proxy SSH host key
# $GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE - GitLab Workspaces Proxy TLS Certificate file
# $GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE - GitLab Workspaces Proxy TLS Key file
# $GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE - GitLab Workspaces Proxy TLS Certificate file
# $GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE - GitLab Workspaces Proxy TLS Key file
# $GITLAB_URL - GitLab URL
# $CLIENT_ID - OAuth Client ID used in GitLab Workspaces Proxy.
# $CLIENT_SECRET - OAuth Client Secret used in GitLab Workspaces Proxy.
# $GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION - GitLab Workspaces Proxy Helm Chart version.
# $INGRESS_NGINX_HELM_CHART_VERSION - Ingress Nginx Helm Chart version.
#
# If this is the first time this script is being run in the Kubernetes cluster, you need to export the environment
# variables listed above. Use the following command:
#
# CLIENT_ID="UPDATE_ME" CLIENT_SECRET="UPDATE_ME" ./scripts/remote_development/workspaces_kubernetes_setup.sh
#
# If this is the first time this script is being run in an environment that requires a non-default GitLab URL or non-default GitLab Workspaces Proxy domains:
#
# GITLAB_WORKSPACES_PROXY_DOMAIN="UPDATE_ME" GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN="UPDATE_ME" GITLAB_URL="UPDATE_ME" CLIENT_ID="UPDATE_ME" CLIENT_SECRET="UPDATE_ME" ./scripts/remote_development/workspaces_kubernetes_setup.sh
#
# Any subsequent invocation would fetch the value from the previous helm release and thus there is no need to export
# the environment variables listed above. Use the following command:
#
# ./scripts/remote_development/workspaces_kubernetes_setup.sh
# =====================================
# clean up
# =====================================
ROOT_DIR="${HOME}/.gitlab-workspaces"
rm -rf "${ROOT_DIR}"
mkdir -p "${ROOT_DIR}"
# =====================================
# set defaults
# =====================================
GITLAB_WORKSPACES_PROXY_CONFIG_SECRET="gitlab-workspaces-proxy-config"
GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAME="gitlab-workspaces-proxy"
GITLAB_WORKSPACES_PROXY_TLS_SECRET="gitlab-workspace-proxy-tls"
GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_SECRET="gitlab-workspace-proxy-wildcard-tls"
# =====================================
# validate user inputs and set defaults
# =====================================
if [ -z "${INGRESS_NGINX_HELM_CHART_VERSION}" ]; then
echo "INGRESS_NGINX_HELM_CHART_VERSION is not explicitly set. Using default."
INGRESS_NGINX_HELM_CHART_VERSION="4.12.0"
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION}" ]; then
echo "GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION="0.1.17"
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE="gitlab-workspaces"
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_DOMAIN}" ]; then
echo "GITLAB_WORKSPACES_PROXY_DOMAIN is not explicitly set. Fetching the value from existing helm release."
GITLAB_WORKSPACES_PROXY_DOMAIN=$(
kubectl get ingress "${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAME}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template="{{ range .spec.tls }}{{ if eq .secretName \"${GITLAB_WORKSPACES_PROXY_TLS_SECRET}\" }}{{ index .hosts 0 }}{{ break }}{{ end }}{{ end }}"
)
if [ -z "${GITLAB_WORKSPACES_PROXY_DOMAIN}" ]; then
echo "Unable to fetch the value from existing helm release. Using default."
GITLAB_WORKSPACES_PROXY_DOMAIN="workspaces.localdev.me"
fi
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN}" ]; then
echo "GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN is not explicitly set. Fetching the value from existing helm release."
GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN=$(
kubectl get ingress "${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAME}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template="{{ range .spec.tls }}{{ if eq .secretName \"${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_SECRET}\" }}{{ index .hosts 0 }}{{ break }}{{ end }}{{ end }}"
)
if [ -z "${GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN}" ]; then
echo "Unable to fetch the value from existing helm release. Using default."
GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN="*.workspaces.localdev.me"
fi
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_REDIRECT_URI}" ]; then
echo "GITLAB_WORKSPACES_PROXY_REDIRECT_URI is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_REDIRECT_URI="https://${GITLAB_WORKSPACES_PROXY_DOMAIN}/auth/callback"
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_SIGNING_KEY}" ]; then
echo "GITLAB_WORKSPACES_PROXY_SIGNING_KEY is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_SIGNING_KEY="a_random_key_consisting_of_letters_numbers_and_special_chars"
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_SSH_HOST_KEY}" ]; then
echo "GITLAB_WORKSPACES_PROXY_SSH_HOST_KEY is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_SSH_HOST_KEY_FILE="${ROOT_DIR}/gitlab-workspaces-proxy-ssh-host-key"
ssh-keygen -f "${GITLAB_WORKSPACES_PROXY_SSH_HOST_KEY_FILE}" -N '' -t rsa
GITLAB_WORKSPACES_PROXY_SSH_HOST_KEY=$(cat "${GITLAB_WORKSPACES_PROXY_SSH_HOST_KEY_FILE}")
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE="${ROOT_DIR}/gitlab_workspaces_proxy_tls_cert"
if [ "${GITLAB_WORKSPACES_PROXY_DOMAIN}" != "workspaces.localdev.me" ]; then
echo "GITLAB_WORKSPACES_PROXY_DOMAIN is non-default. Trying to fetch the value from existing helm release"
kubectl get secret "${GITLAB_WORKSPACES_PROXY_TLS_SECRET}" \
--namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template='{{ index .data "tls.crt" | base64decode }}' \
> "${GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE}" \
|| echo "Unable to fetch the value from existing helm release"
else
GITLAB_WORKSPACES_PROXY_TLS_GENERATE=true
fi
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE="${ROOT_DIR}/gitlab_workspaces_proxy_tls_key"
if [ "${GITLAB_WORKSPACES_PROXY_DOMAIN}" != "workspaces.localdev.me" ]; then
echo "GITLAB_WORKSPACES_PROXY_DOMAIN is non-default. Trying to fetch the value from existing helm release"
kubectl get secret "${GITLAB_WORKSPACES_PROXY_TLS_SECRET}" \
--namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template='{{ index .data "tls.key" | base64decode }}' \
> "${GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE}" \
|| echo "Unable to fetch the value from existing helm release"
else
GITLAB_WORKSPACES_PROXY_TLS_GENERATE=true
fi
fi
if [ "${GITLAB_WORKSPACES_PROXY_TLS_GENERATE}" == true ]; then
mkcert -install
mkcert \
--cert-file="${GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE}" \
--key-file="${GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE}" \
"${GITLAB_WORKSPACES_PROXY_DOMAIN}"
fi
if [ ! -f "${GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE is not found."
exit 1
fi
if [ ! -f "${GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE is not found."
exit 1
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE="${ROOT_DIR}/gitlab_workspaces_proxy_wildcard_tls_cert"
if [ "${GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN}" != "*.workspaces.localdev.me" ]; then
echo "GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN is non-default. Trying to fetch the value from existing helm release"
kubectl get secret "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_SECRET}" \
--namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template='{{ index .data "tls.crt" | base64decode }}' \
> "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE}" \
|| echo "Unable to fetch the value from existing helm release"
else
GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_GENERATE=true
fi
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE is not explicitly set. Using default."
GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE="${ROOT_DIR}/gitlab_workspaces_proxy_wildcard_tls_key"
if [ "${GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN}" != "*.workspaces.localdev.me" ]; then
echo "GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN is non-default. Trying to fetch the value from existing helm release"
kubectl get secret "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_SECRET}" \
--namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template='{{ index .data "tls.key" | base64decode }}' \
> "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE}" \
|| echo "Unable to fetch the value from existing helm release"
else
GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_GENERATE=true
fi
fi
if [ "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_GENERATE}" == true ]; then
mkcert -install
mkcert \
--cert-file="${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE}" \
--key-file="${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE}" \
"${GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN}"
fi
if [ ! -f "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE is not found."
exit 1
fi
if [ ! -f "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE}" ]; then
echo "GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE is not found."
exit 1
fi
if [ -z "${GITLAB_URL}" ]; then
echo "GITLAB_URL is not explicitly set. Trying to fetch the value from existing helm release"
GITLAB_URL=$(
kubectl get secret "${GITLAB_WORKSPACES_PROXY_CONFIG_SECRET}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template='{{ index .data "auth.host" | base64decode }}'
)
if [ -z "${GITLAB_URL}" ]; then
echo "Unable to fetch the value from existing helm release. Using default."
GITLAB_URL="http://gdk.test:3000"
fi
fi
if [ -z "${CLIENT_ID}" ]; then
echo "CLIENT_ID is not explicitly set. Trying to fetch the value from existing helm release"
CLIENT_ID=$(
kubectl get secret gitlab-workspaces-proxy-config --namespace="gitlab-workspaces" \
kubectl get secret "${GITLAB_WORKSPACES_PROXY_CONFIG_SECRET}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template='{{ index .data "auth.client_id" | base64decode }}'
)
if [ -z "${CLIENT_ID}" ]; then
@ -35,7 +237,7 @@ fi
if [ -z "${CLIENT_SECRET}" ]; then
echo "CLIENT_SECRET is not explicitly set. Trying to fetch the value from existing helm release"
CLIENT_SECRET=$(
kubectl get secret gitlab-workspaces-proxy-config --namespace="gitlab-workspaces" \
kubectl get secret "${GITLAB_WORKSPACES_PROXY_CONFIG_SECRET}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template='{{ index .data "auth.client_secret" | base64decode }}'
)
if [ -z "${CLIENT_SECRET}" ]; then
@ -45,22 +247,28 @@ if [ -z "${CLIENT_SECRET}" ]; then
fi
fi
if [ -z "${GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION}" ]; then
echo "GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION is not explicitly set. Using '0.1.17'."
GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION="0.1.17"
if [ "${GITLAB_WORKSPACES_PROXY_TLS_GENERATE}" == true ]; then
mkcert -install
mkcert \
--cert-file="${GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE}" \
--key-file="${GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE}" \
"${GITLAB_WORKSPACES_PROXY_DOMAIN}"
fi
if [ -z "${INGRESS_NGINX_HELM_CHART_VERSION}" ]; then
echo "INGRESS_NGINX_HELM_CHART_VERSION is not explicitly set. Using '4.12.0'."
INGRESS_NGINX_HELM_CHART_VERSION="4.12.0"
if [ "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_GENERATE}" == true ]; then
mkcert -install
mkcert \
--cert-file="${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE}" \
--key-file="${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE}" \
"${GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN}"
fi
ROOT_DIR="${HOME}/.gitlab-workspaces-proxy"
mkdir -p "${ROOT_DIR}"
# =====================================
# install ingress-nginx
# =====================================
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx --force-update
helm repo update
helm --namespace ingress-nginx uninstall ingress-nginx --ignore-not-found --timeout=600s --wait
helm upgrade --install \
@ -84,80 +292,66 @@ else
exit 1
fi
# =====================================
# install gitlab-workspaces-proxy
export GITLAB_WORKSPACES_PROXY_DOMAIN="workspaces.localdev.me"
export GITLAB_WORKSPACES_WILDCARD_DOMAIN="*.workspaces.localdev.me"
export REDIRECT_URI="https://${GITLAB_WORKSPACES_PROXY_DOMAIN}/auth/callback"
export SSH_HOST_KEY="${ROOT_DIR}/gitlab-workspaces-proxy-ssh-host-key"
export GITLAB_URL="http://gdk.test:3000"
export SIGNING_KEY="a_random_key_consisting_of_letters_numbers_and_special_chars"
# install self-signed certs
rm -f "${ROOT_DIR}/workspaces.localdev.me+1.pem" "${ROOT_DIR}/workspaces.localdev.me+1-key.pem"
mkcert -install
mkcert \
--cert-file="${ROOT_DIR}/workspaces.localdev.me+1.pem" \
--key-file="${ROOT_DIR}/workspaces.localdev.me+1-key.pem" \
"${GITLAB_WORKSPACES_PROXY_DOMAIN}" "${GITLAB_WORKSPACES_WILDCARD_DOMAIN}"
# generate ssh host key
rm -f "${SSH_HOST_KEY}"
ssh-keygen -f "${ROOT_DIR}/gitlab-workspaces-proxy-ssh-host-key" -N '' -t rsa
# create kubernetes secrets required by the gitlab-workspaces-proxy helm chart
if kubectl get namespace gitlab-workspaces;
# =====================================
# create the kubernetes namespace if it does not exists
if kubectl get namespace "${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}";
then
echo "Namespace 'gitlab-workspaces' already exists."
echo "Namespace '${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}' already exists."
else
echo "Namespace 'gitlab-workspaces' does not exists. Creating it."
kubectl create namespace gitlab-workspaces
echo "Namespace '${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}' does not exists. Creating it."
kubectl create namespace "${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}"
fi
kubectl delete secret gitlab-workspaces-proxy-config --namespace="gitlab-workspaces" || true
kubectl create secret generic gitlab-workspaces-proxy-config \
--namespace="gitlab-workspaces" \
# create the kubernetes config secret
kubectl delete secret "${GITLAB_WORKSPACES_PROXY_CONFIG_SECRET}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" || true
kubectl create secret generic "${GITLAB_WORKSPACES_PROXY_CONFIG_SECRET}" \
--namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--from-literal="auth.client_id=${CLIENT_ID}" \
--from-literal="auth.client_secret=${CLIENT_SECRET}" \
--from-literal="auth.host=${GITLAB_URL}" \
--from-literal="auth.redirect_uri=${REDIRECT_URI}" \
--from-literal="auth.signing_key=${SIGNING_KEY}" \
--from-literal="ssh.host_key=$(cat "${SSH_HOST_KEY}")"
--from-literal="auth.redirect_uri=${GITLAB_WORKSPACES_PROXY_REDIRECT_URI}" \
--from-literal="auth.signing_key=${GITLAB_WORKSPACES_PROXY_SIGNING_KEY}" \
--from-literal="ssh.host_key=${GITLAB_WORKSPACES_PROXY_SSH_HOST_KEY}"
kubectl delete secret gitlab-workspace-proxy-tls --namespace="gitlab-workspaces" || true
kubectl create secret tls gitlab-workspace-proxy-tls \
--namespace="gitlab-workspaces" \
--cert="${ROOT_DIR}/workspaces.localdev.me+1.pem" \
--key="${ROOT_DIR}/workspaces.localdev.me+1-key.pem"
# create the kubernetes tls secret
kubectl delete secret "${GITLAB_WORKSPACES_PROXY_TLS_SECRET}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" || true
kubectl create secret tls "${GITLAB_WORKSPACES_PROXY_TLS_SECRET}" \
--namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--cert="${GITLAB_WORKSPACES_PROXY_TLS_CERT_FILE}" \
--key="${GITLAB_WORKSPACES_PROXY_TLS_KEY_FILE}"
kubectl delete secret gitlab-workspace-proxy-wildcard-tls --namespace="gitlab-workspaces" || true
kubectl create secret tls gitlab-workspace-proxy-wildcard-tls \
--namespace="gitlab-workspaces" \
--cert="${ROOT_DIR}/workspaces.localdev.me+1.pem" \
--key="${ROOT_DIR}/workspaces.localdev.me+1-key.pem"
# create the kubernetes wildcard tls secret
kubectl delete secret "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_SECRET}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" || true
kubectl create secret tls "${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_SECRET}" \
--namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--cert="${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_CERT_FILE}" \
--key="${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_KEY_FILE}"
# install gitlab-workspaces-proxy helm chart
# install the helm chart
helm repo add gitlab-workspaces-proxy \
https://gitlab.com/api/v4/projects/gitlab-org%2fworkspaces%2fgitlab-workspaces-proxy/packages/helm/devel \
--force-update
helm repo update
helm --namespace gitlab-workspaces uninstall gitlab-workspaces-proxy --ignore-not-found --timeout=600s --wait
helm --namespace "${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" uninstall "${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAME}" --ignore-not-found --timeout=600s --wait
helm upgrade --install gitlab-workspaces-proxy \
helm upgrade --install "${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAME}" \
gitlab-workspaces-proxy/gitlab-workspaces-proxy \
--version="${GITLAB_WORKSPACES_PROXY_HELM_CHART_VERSION}" \
--namespace="gitlab-workspaces" \
--namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--set="ingress.enabled=true" \
--set="ingress.hosts[0].host=${GITLAB_WORKSPACES_PROXY_DOMAIN}" \
--set="ingress.hosts[0].paths[0].path=/" \
--set="ingress.hosts[0].paths[0].pathType=ImplementationSpecific" \
--set="ingress.hosts[1].host=${GITLAB_WORKSPACES_WILDCARD_DOMAIN}" \
--set="ingress.hosts[1].host=${GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN}" \
--set="ingress.hosts[1].paths[0].path=/" \
--set="ingress.hosts[1].paths[0].pathType=ImplementationSpecific" \
--set="ingress.tls[0].hosts[0]=${GITLAB_WORKSPACES_PROXY_DOMAIN}" \
--set="ingress.tls[0].secretName=gitlab-workspace-proxy-tls" \
--set="ingress.tls[1].hosts[0]=${GITLAB_WORKSPACES_WILDCARD_DOMAIN}" \
--set="ingress.tls[1].secretName=gitlab-workspace-proxy-wildcard-tls" \
--set="ingress.tls[0].secretName=${GITLAB_WORKSPACES_PROXY_TLS_SECRET}" \
--set="ingress.tls[1].hosts[0]=${GITLAB_WORKSPACES_PROXY_WILDCARD_DOMAIN}" \
--set="ingress.tls[1].secretName=${GITLAB_WORKSPACES_PROXY_WILDCARD_TLS_SECRET}" \
--set="ingress.className=nginx" \
--timeout=600s --wait --wait-for-jobs
@ -178,10 +372,5 @@ fi
# print the configuration secret to verify
echo "Printing the contents of the configuration secret to verify"
# shellcheck disable=SC2016 # The expression in the go template do not have to be expanded.
kubectl get secret gitlab-workspaces-proxy-config --namespace="gitlab-workspaces" \
kubectl get secret "${GITLAB_WORKSPACES_PROXY_CONFIG_SECRET}" --namespace="${GITLAB_WORKSPACES_PROXY_HELM_RELEASE_NAMESPACE}" \
--output go-template='{{range $k, $v := .data}}{{printf "%s: " $k}}{{printf "%s" $v | base64decode}}{{"\n"}}{{end}}'
# cleanup
rm -f "${SSH_HOST_KEY}" \
"${ROOT_DIR}/workspaces.localdev.me+1.pem" \
"${ROOT_DIR}/workspaces.localdev.me+1-key.pem"

View File

@ -6,6 +6,8 @@ import {
TOKEN_TYPE_STATUS,
TOKEN_TYPE_JOBS_RUNNER_TYPE,
TOKEN_TITLE_JOBS_RUNNER_TYPE,
TOKEN_TITLE_JOBS_SOURCE,
TOKEN_TYPE_JOBS_SOURCE,
} from '~/vue_shared/components/filtered_search_bar/constants';
import JobsFilteredSearch from '~/ci/common/private/jobs_filtered_search/app.vue';
import { mockFailedSearchToken } from 'jest/ci/jobs_mock_data';
@ -21,6 +23,7 @@ describe('Jobs filtered search', () => {
const findStatusToken = () => getSearchToken('status');
const findRunnerTypeToken = () => getSearchToken('jobs-runner-type');
const findJobSourceToken = () => getSearchToken('jobs-source');
const createComponent = (props, provideOptions = {}) => {
wrapper = shallowMount(JobsFilteredSearch, {
@ -28,7 +31,11 @@ describe('Jobs filtered search', () => {
...props,
},
provide: {
glFeatures: { adminJobsFilterRunnerType: true, feSearchBuildByName: true },
glFeatures: {
adminJobsFilterRunnerType: true,
feSearchBuildByName: true,
populateAndUseBuildSourceTable: true,
},
...provideOptions,
},
});
@ -65,6 +72,18 @@ describe('Jobs filtered search', () => {
});
});
it('displays job source token', () => {
createComponent();
expect(findJobSourceToken()).toMatchObject({
type: TOKEN_TYPE_JOBS_SOURCE,
icon: 'trigger-source',
title: TOKEN_TITLE_JOBS_SOURCE,
unique: true,
operators: OPERATORS_IS,
});
});
it('displays token for runner type', () => {
createComponent();
@ -118,6 +137,32 @@ describe('Jobs filtered search', () => {
});
});
describe('when feature flag `populateAndUseBuildSourceTable` is disabled', () => {
const provideOptions = { glFeatures: { populateAndUseBuildSourceTable: false } };
it('does not display token for job source', () => {
createComponent(null, provideOptions);
expect(findJobSourceToken()).toBeUndefined();
});
describe('with query string passed', () => {
it('filtered search returns only data shape for search token `status` and not for search token `job source`', () => {
const tokenStatusesValue = 'SUCCESS';
const tokenJobSourceValue = 'PUSH';
createComponent(
{ queryString: { statuses: tokenStatusesValue, sources: tokenJobSourceValue } },
provideOptions,
);
expect(findFilteredSearch().props('value')).toEqual([
{ type: TOKEN_TYPE_STATUS, value: { data: tokenStatusesValue, operator: '=' } },
]);
});
});
});
describe('when feature flag `adminJobsFilterRunnerType` is disabled', () => {
const provideOptions = { glFeatures: { adminJobsFilterRunnerType: false } };

View File

@ -0,0 +1,78 @@
import { GlFilteredSearchToken, GlFilteredSearchSuggestion } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
import JobSourceToken from '~/ci/common/private/jobs_filtered_search/tokens/job_source_token.vue';
import { JOB_SOURCES } from 'ee_else_ce/ci/common/private/jobs_filtered_search/tokens/constants';
import {
TOKEN_TITLE_JOBS_SOURCE,
TOKEN_TYPE_JOBS_SOURCE,
} from '~/vue_shared/components/filtered_search_bar/constants';
describe('Job Source Token', () => {
let wrapper;
const findFilteredSearchToken = () => wrapper.findComponent(GlFilteredSearchToken);
const findAllFilteredSearchSuggestions = () =>
wrapper.findAllComponents(GlFilteredSearchSuggestion);
const findActiveSourceText = () => wrapper.find('[data-testid="job-source-text"]').text();
const defaultProps = {
config: {
type: TOKEN_TYPE_JOBS_SOURCE,
icon: 'trigger-source',
title: TOKEN_TITLE_JOBS_SOURCE,
unique: true,
},
value: {
data: '',
},
cursorPosition: 'start',
};
const createComponent = (props = {}) => {
wrapper = shallowMount(JobSourceToken, {
propsData: {
...defaultProps,
...props,
},
stubs: {
GlFilteredSearchToken: stubComponent(GlFilteredSearchToken, {
template: `
<div>
<div class="view-slot"><slot name="view"></slot></div>
<div class="suggestions-slot"><slot name="suggestions"></slot></div>
</div>
`,
}),
},
});
};
beforeEach(() => {
createComponent();
});
it('passes config correctly', () => {
expect(findFilteredSearchToken().props('config')).toEqual(defaultProps.config);
});
it('renders all job sources available', () => {
expect(findAllFilteredSearchSuggestions()).toHaveLength(JOB_SOURCES.length);
});
it('updates the displayed text when value prop changes', async () => {
// Start with web source
createComponent({
value: { data: 'WEB' },
});
expect(findActiveSourceText()).toBe('Web');
// Update to pipeline source
await wrapper.setProps({
value: { data: 'PIPELINE' },
});
expect(findActiveSourceText()).toBe('Pipeline');
});
});

View File

@ -9,7 +9,10 @@ import mockAllJobs from 'test_fixtures/graphql/jobs/get_all_jobs.query.graphql.j
import mockJobsAsGuest from 'test_fixtures/graphql/jobs/get_jobs.query.graphql.as_guest.json';
import mockCancelableJobsCount from 'test_fixtures/graphql/jobs/get_cancelable_jobs_count.query.graphql.json';
import { TEST_HOST } from 'spec/test_constants';
import { TOKEN_TYPE_STATUS } from '~/vue_shared/components/filtered_search_bar/constants';
import {
TOKEN_TYPE_STATUS,
TOKEN_TYPE_JOBS_SOURCE,
} from '~/vue_shared/components/filtered_search_bar/constants';
const threeWeeksAgo = new Date();
threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
@ -1393,6 +1396,11 @@ export const mockFailedSearchToken = {
value: { data: 'FAILED', operator: '=' },
};
export const mockPushSourceToken = {
type: TOKEN_TYPE_JOBS_SOURCE,
value: { data: 'PUSH', operator: '=' },
};
export const retryMutationResponse = {
data: {
jobRetry: {

View File

@ -19,6 +19,7 @@ import {
mockJobsResponseEmpty,
mockFailedSearchToken,
mockJobsCountResponse,
mockPushSourceToken,
} from 'jest/ci/jobs_mock_data';
import { RAW_TEXT_WARNING, DEFAULT_PAGINATION, JOBS_PER_PAGE } from '~/ci/jobs_page/constants';
@ -29,6 +30,7 @@ jest.mock('~/alert');
jest.mock('~/graphql_shared/utils');
const mockJobName = 'rspec-job';
const mockJobSource = mockPushSourceToken.value.data;
describe('Job table app', () => {
let wrapper;
@ -60,13 +62,15 @@ describe('Job table app', () => {
handler = successHandler,
countHandler = countSuccessHandler,
mountFn = shallowMount,
flagState = false,
feSearchBuildByName = false,
populateAndUseBuildSourceTable = false,
} = {}) => {
wrapper = mountFn(JobsTableApp, {
provide: {
fullPath: projectPath,
glFeatures: {
feSearchBuildByName: flagState,
feSearchBuildByName,
populateAndUseBuildSourceTable,
},
},
apolloProvider: createMockApolloProvider(handler, countHandler),
@ -227,11 +231,13 @@ describe('Job table app', () => {
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: 'FAILED',
sources: null,
...DEFAULT_PAGINATION,
});
expect(countSuccessHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: 'FAILED',
sources: null,
});
});
@ -290,11 +296,13 @@ describe('Job table app', () => {
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: 'FAILED',
sources: null,
...DEFAULT_PAGINATION,
});
expect(countSuccessHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: 'FAILED',
sources: null,
});
expect(urlUtils.updateHistory).toHaveBeenCalledWith({
url: `${TEST_HOST}/?statuses=FAILED`,
@ -309,17 +317,19 @@ describe('Job table app', () => {
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: null,
sources: null,
...DEFAULT_PAGINATION,
});
expect(countSuccessHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: null,
sources: null,
});
});
describe('with feature flag feSearchBuildByName enabled', () => {
beforeEach(() => {
createComponent({ flagState: true });
createComponent({ feSearchBuildByName: true });
});
it('filters jobs by name', async () => {
@ -433,6 +443,126 @@ describe('Job table app', () => {
});
});
});
describe('with feature flag populateAndUseBuildSourceTable enabled', () => {
beforeEach(() => {
createComponent({ populateAndUseBuildSourceTable: true });
});
it('filters jobs by source', async () => {
await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockPushSourceToken]);
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
sources: mockJobSource,
statuses: null,
...DEFAULT_PAGINATION,
});
expect(countSuccessHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
sources: mockJobSource,
statuses: null,
});
});
it('filters only by source after removing status filter', async () => {
await findFilteredSearch().vm.$emit('filterJobsBySearch', [
mockFailedSearchToken,
mockPushSourceToken,
]);
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
sources: mockJobSource,
statuses: 'FAILED',
...DEFAULT_PAGINATION,
});
expect(countSuccessHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
sources: mockJobSource,
statuses: 'FAILED',
});
await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockPushSourceToken]);
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
sources: mockJobSource,
statuses: null,
...DEFAULT_PAGINATION,
});
expect(countSuccessHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
sources: mockJobSource,
statuses: null,
});
});
it('updates URL query string when filtering jobs by source', async () => {
jest.spyOn(urlUtils, 'updateHistory');
await findFilteredSearch().vm.$emit('filterJobsBySearch', [mockPushSourceToken]);
expect(urlUtils.updateHistory).toHaveBeenCalledWith({
url: `${TEST_HOST}/?sources=${mockJobSource}`,
});
});
it('updates URL query string when filtering jobs by source and status', async () => {
jest.spyOn(urlUtils, 'updateHistory');
await findFilteredSearch().vm.$emit('filterJobsBySearch', [
mockFailedSearchToken,
mockPushSourceToken,
]);
expect(urlUtils.updateHistory).toHaveBeenCalledWith({
url: `${TEST_HOST}/?statuses=FAILED&sources=${mockJobSource}`,
});
});
it('resets query param after clearing tokens', () => {
jest.spyOn(urlUtils, 'updateHistory');
findFilteredSearch().vm.$emit('filterJobsBySearch', [
mockFailedSearchToken,
mockPushSourceToken,
]);
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: 'FAILED',
sources: mockJobSource,
...DEFAULT_PAGINATION,
});
expect(countSuccessHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: 'FAILED',
sources: mockJobSource,
});
expect(urlUtils.updateHistory).toHaveBeenCalledWith({
url: `${TEST_HOST}/?statuses=FAILED&sources=${mockJobSource}`,
});
findFilteredSearch().vm.$emit('filterJobsBySearch', []);
expect(urlUtils.updateHistory).toHaveBeenCalledWith({
url: `${TEST_HOST}/`,
});
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: null,
sources: null,
...DEFAULT_PAGINATION,
});
expect(countSuccessHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: null,
sources: null,
});
});
});
});
describe('pagination', () => {
@ -533,6 +663,7 @@ describe('Job table app', () => {
expect(successHandler).toHaveBeenCalledWith({
fullPath: 'gitlab-org/gitlab',
statuses: 'FAILED',
sources: null,
...DEFAULT_PAGINATION,
});
});

View File

@ -37,9 +37,11 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillPCiPipelineVariablesFromCiTr
let!(:pipeline2) { ci_pipeline.create!(partition_id: 100, project_id: 1) }
before do
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id, variables: { ENV1: true })
ci_trigger_request.create!(commit_id: pipeline2.id, trigger_id: trigger2.id, variables: { ENV2: false })
ci_trigger_request.create!(commit_id: nil, trigger_id: trigger3.id)
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id,
variables: { ENV1: true }, project_id: 1)
ci_trigger_request.create!(commit_id: pipeline2.id, trigger_id: trigger2.id,
variables: { ENV2: false }, project_id: 1)
ci_trigger_request.create!(commit_id: nil, trigger_id: trigger3.id, project_id: 1)
end
it 'upserts p_ci_pipeline_variables' do
@ -73,9 +75,9 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillPCiPipelineVariablesFromCiTr
context 'when one pipeline has multiple ci_trigger_requests' do
before do
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id,
variables: { ENV1: true, VAR1_ONLY: true })
variables: { ENV1: true, VAR1_ONLY: true }, project_id: 1)
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id,
variables: { ENV1: false, VAR2_ONLY: false })
variables: { ENV1: false, VAR2_ONLY: false }, project_id: 1)
end
it 'upserts p_ci_pipeline_variables' do

View File

@ -37,10 +37,10 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillPCiPipelinesTriggerId,
context 'when ci_trigger_requests belongs to only one pipeline' do
before do
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id)
ci_trigger_request.create!(commit_id: pipeline2.id, trigger_id: trigger2.id)
ci_trigger_request.create!(commit_id: pipeline3.id, trigger_id: trigger3.id)
ci_trigger_request.create!(commit_id: nil, trigger_id: trigger4.id)
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id, project_id: 1)
ci_trigger_request.create!(commit_id: pipeline2.id, trigger_id: trigger2.id, project_id: 1)
ci_trigger_request.create!(commit_id: pipeline3.id, trigger_id: trigger3.id, project_id: 1)
ci_trigger_request.create!(commit_id: nil, trigger_id: trigger4.id, project_id: 1)
end
it 'updates p_ci_pipelines.trigger_id' do
@ -65,12 +65,12 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillPCiPipelinesTriggerId,
context 'when ci_trigger_requests belongs to multiple pipelines' do
before do
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id)
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id)
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id)
ci_trigger_request.create!(commit_id: pipeline2.id, trigger_id: trigger2.id)
ci_trigger_request.create!(commit_id: pipeline3.id, trigger_id: trigger3.id)
ci_trigger_request.create!(commit_id: nil, trigger_id: trigger4.id)
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id, project_id: 1)
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id, project_id: 1)
ci_trigger_request.create!(commit_id: pipeline1.id, trigger_id: trigger1.id, project_id: 1)
ci_trigger_request.create!(commit_id: pipeline2.id, trigger_id: trigger2.id, project_id: 1)
ci_trigger_request.create!(commit_id: pipeline3.id, trigger_id: trigger3.id, project_id: 1)
ci_trigger_request.create!(commit_id: nil, trigger_id: trigger4.id, project_id: 1)
end
it 'updates p_ci_pipelines.trigger_id' do

View File

@ -8,9 +8,9 @@ RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedStageRecords,
let(:stages_table) { table(:p_ci_stages, database: :ci, primary_key: :id) }
let(:default_attributes) { { project_id: 600, partition_id: 100 } }
let!(:regular_pipeline) { pipelines_table.create!(id: 1, **default_attributes) }
let!(:deleted_pipeline) { pipelines_table.create!(id: 2, **default_attributes) }
let!(:other_pipeline) { pipelines_table.create!(id: 3, **default_attributes) }
let!(:regular_pipeline) { pipelines_table.create!(default_attributes) }
let!(:deleted_pipeline) { pipelines_table.create!(default_attributes) }
let!(:other_pipeline) { pipelines_table.create!(default_attributes) }
let!(:regular_build) do
stages_table.create!(pipeline_id: regular_pipeline.id, **default_attributes)
@ -22,20 +22,6 @@ RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedStageRecords,
let(:connection) { Ci::ApplicationRecord.connection }
around do |example|
connection.transaction do
connection.execute(<<~SQL)
ALTER TABLE ci_pipelines DISABLE TRIGGER ALL;
SQL
example.run
connection.execute(<<~SQL)
ALTER TABLE ci_pipelines ENABLE TRIGGER ALL;
SQL
end
end
describe '#perform' do
subject(:migration) do
described_class.new(
@ -50,12 +36,21 @@ RSpec.describe Gitlab::BackgroundMigration::DeleteOrphanedStageRecords,
end
it 'deletes from p_ci_stages where pipeline_id has no related record at p_ci_pipelines.id', :aggregate_failures do
expect { deleted_pipeline.delete }.to not_change { stages_table.count }
expect { without_referential_integrity { deleted_pipeline.delete } }.to not_change { stages_table.count }
expect { migration.perform }.to change { stages_table.count }.from(2).to(1)
expect(regular_build.reload).to be_persisted
expect { orphaned_build.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
def without_referential_integrity
connection.transaction do
connection.execute('ALTER TABLE ci_pipelines DISABLE TRIGGER ALL;')
result = yield
connection.execute('ALTER TABLE ci_pipelines ENABLE TRIGGER ALL;')
result
end
end
end
end

View File

@ -3836,6 +3836,45 @@ module Gitlab
end
end
end
context 'with valid and invalid publish configurations' do
where(:config, :expected_errors) do
[
[
YAML.dump(
{ pages: { stage: 'deploy', script: 'test', pages: { publish: 'custom' }, publish: 'public' } }),
['pages job: use either pages:publish or pages:pages:publish']
],
[
YAML.dump({ pages: { stage: 'deploy', script: 'test', publish: 'public' } }),
[]
],
[
YAML.dump(
{ pages: { stage: 'deploy', script: 'test', publish: 'public', pages: { path_prefix: 'foo' } } }),
[]
],
[
YAML.dump({ pages: { stage: 'deploy', script: 'test', pages: { publish: 'custom' } } }),
[]
],
[
YAML.dump({ pages: { stage: 'deploy', script: 'test', pages: true } }),
[]
],
[
YAML.dump({ pages: { stage: 'deploy', script: 'test', pages: true, publish: 'public' } }),
[]
]
]
end
with_them do
it 'validates publish configurations and returns errors if there are any' do
expect(subject.errors).to match_array(expected_errors)
end
end
end
end
end
end

View File

@ -15,23 +15,23 @@ RSpec.describe Gitlab::Database::MigrationHelpers::WraparoundAutovacuum, feature
using RSpec::Parameterized::TableSyntax
where(:dot_com, :jh, :dev_or_test, :wraparound_prevention, :expectation) do
true | true | true | true | false
true | true | false | true | false
false | true | true | true | false
false | true | false | true | false
true | true | true | true | true
true | true | false | true | true
false | true | true | true | true
false | true | false | true | true
true | true | true | false | true
true | true | false | false | false
true | true | false | false | true
false | true | true | false | true
false | true | false | false | false
false | true | false | false | true
true | false | true | true | false
true | false | false | true | false
false | false | true | true | false
false | false | false | true | false
false | false | true | true | true
false | false | false | true | true
true | false | true | false | true
true | false | false | false | true
false | false | true | false | true
false | false | false | false | false
false | false | false | false | true
end
with_them do

View File

@ -66,6 +66,7 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
'ci_build_pending_states.project_id', # LFK already present on p_ci_builds and cascade delete all ci resources
'ci_builds_runner_session.project_id', # LFK already present on p_ci_builds and cascade delete all ci resources
'p_ci_pipelines_config.project_id', # LFK already present on p_ci_pipelines and cascade delete all ci resources
'ci_trigger_requests.project_id', # LFK already present on ci_triggers and cascade delete all ci resources
'ci_unit_test_failures.project_id', # LFK already present on ci_unit_tests and cascade delete all ci resources
'dast_profiles_pipelines.project_id', # LFK already present on dast_profiles and will cascade delete
'dast_scanner_profiles_builds.project_id', # LFK already present on dast_scanner_profiles and will cascade delete