Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2022-11-16 03:11:39 +00:00
parent 21fc10ad5a
commit 37d1a6b416
35 changed files with 460 additions and 168 deletions

View File

@ -18,7 +18,7 @@ build-qa-image:
- ./scripts/build_qa_image
# This image is used by:
# - The `CNG` downstream pipelines (we pass the image tag via the `review-build-cng` job): https://gitlab.com/gitlab-org/gitlab/-/blob/c34e0834b01cd45c1f69a01b5e38dd6bc505f903/.gitlab/ci/review-apps/main.gitlab-ci.yml#L69
# - The `CNG` pipelines (via the `review-build-cng` job): https://gitlab.com/gitlab-org/build/CNG/-/blob/cfc67136d711e1c8c409bf8e57427a644393da2f/.gitlab-ci.yml#L335
# - The `omnibus-gitlab` pipelines (via the `e2e:package-and-test` job): https://gitlab.com/gitlab-org/omnibus-gitlab/-/blob/dfd1ad475868fc84e91ab7b5706aa03e46dc3a86/.gitlab-ci.yml#L130
build-assets-image:
extends:
@ -27,10 +27,7 @@ build-assets-image:
stage: build-images
needs: ["compile-production-assets"]
script:
# TODO: Change the image tag to be the MD5 of assets files and skip image building if the image exists
# We'll also need to pass GITLAB_ASSETS_TAG to the triggered omnibus-gitlab pipeline similarly to how we do it for triggered CNG pipelines
# https://gitlab.com/gitlab-org/gitlab/issues/208389
- run_timed_command "scripts/build_assets_image"
artifacts:
expire_in: 7 days
paths:
# The `cached-assets-hash.txt` file is used in `review-build-cng-env` (`.gitlab/ci/review-apps/main.gitlab-ci.yml`)
# to pass the assets image tag to the CNG downstream pipeline.
- cached-assets-hash.txt

View File

@ -28,7 +28,6 @@
fi
fi
- assets_compile_script
- echo -n "${GITLAB_ASSETS_HASH}" > "cached-assets-hash.txt"
compile-production-assets:
extends:
@ -44,7 +43,6 @@ compile-production-assets:
# These assets are used in multiple locations:
# - in `build-assets-image` job to create assets image for packaging systems
# - GitLab UI for integration tests: https://gitlab.com/gitlab-org/gitlab-ui/-/blob/e88493b3c855aea30bf60baee692a64606b0eb1e/.storybook/preview-head.pug#L1
- cached-assets-hash.txt
- public/assets/
- "${WEBPACK_COMPILE_LOG_PATH}"
when: always
@ -75,6 +73,9 @@ update-assets-compile-production-cache:
- .assets-compile-cache-push
- .shared:rules:update-cache
stage: prepare
script:
- !reference [compile-production-assets, script]
- echo -n "${GITLAB_ASSETS_HASH}" > "cached-assets-hash.txt"
artifacts: {} # This job's purpose is only to update the cache.
update-assets-compile-test-cache:

View File

@ -38,6 +38,23 @@ stages:
extends:
- .gitlab-qa-install
.omnibus-env:
variables:
BUILD_ENV: build.env
script:
- |
SECURITY_SOURCES=$([[ ! "$CI_PROJECT_NAMESPACE" =~ ^gitlab-org\/security ]] || echo "true")
echo "SECURITY_SOURCES=${SECURITY_SOURCES:-false}" > $BUILD_ENV
echo "OMNIBUS_GITLAB_CACHE_UPDATE=${OMNIBUS_GITLAB_CACHE_UPDATE:-false}" >> $BUILD_ENV
for version_file in *_VERSION; do echo "$version_file=$(cat $version_file)" >> $BUILD_ENV; done
echo "OMNIBUS_GITLAB_RUBY3_BUILD=${OMNIBUS_GITLAB_RUBY3_BUILD:-false}" >> $BUILD_ENV
echo "OMNIBUS_GITLAB_CACHE_EDITION=${OMNIBUS_GITLAB_CACHE_EDITION:-GITLAB}" >> $BUILD_ENV
echo "Built environment file for omnibus build:"
cat $BUILD_ENV
artifacts:
reports:
dotenv: $BUILD_ENV
.update-script:
script:
- export QA_COMMAND="bundle exec gitlab-qa Test::Omnibus::UpdateFromPrevious $RELEASE $GITLAB_VERSION $UPDATE_TYPE -- $QA_RSPEC_TAGS $RSPEC_REPORT_OPTS"
@ -91,42 +108,9 @@ dont-interrupt-me:
trigger-omnibus-env:
extends:
- .omnibus-env
- .rules:omnibus-build
stage: .pre
needs:
# We need this job because we need its `cached-assets-hash.txt` artifact, so that we can pass the assets image tag to the downstream omnibus-gitlab pipeline.
- pipeline: $PARENT_PIPELINE_ID
job: build-assets-image
variables:
BUILD_ENV: build.env
before_script:
- |
# This is duplicating the function from `scripts/utils.sh` since that file can be included in other projects.
function assets_image_tag() {
local cache_assets_hash_file="cached-assets-hash.txt"
if [[ -n "${CI_COMMIT_TAG}" ]]; then
echo -n "${CI_COMMIT_REF_NAME}"
elif [[ -f "${cache_assets_hash_file}" ]]; then
echo -n "assets-hash-$(cat ${cache_assets_hash_file} | cut -c1-10)"
else
echo -n "${CI_COMMIT_SHA}"
fi
}
script:
- |
SECURITY_SOURCES=$([[ ! "$CI_PROJECT_NAMESPACE" =~ ^gitlab-org\/security ]] || echo "true")
echo "SECURITY_SOURCES=${SECURITY_SOURCES:-false}" > $BUILD_ENV
echo "OMNIBUS_GITLAB_CACHE_UPDATE=${OMNIBUS_GITLAB_CACHE_UPDATE:-false}" >> $BUILD_ENV
for version_file in *_VERSION; do echo "$version_file=$(cat $version_file)" >> $BUILD_ENV; done
echo "OMNIBUS_GITLAB_RUBY3_BUILD=${OMNIBUS_GITLAB_RUBY3_BUILD:-false}" >> $BUILD_ENV
echo "OMNIBUS_GITLAB_CACHE_EDITION=${OMNIBUS_GITLAB_CACHE_EDITION:-GITLAB}" >> $BUILD_ENV
echo "GITLAB_ASSETS_TAG=$(assets_image_tag)" >> $BUILD_ENV
echo "Built environment file for omnibus build:"
cat $BUILD_ENV
artifacts:
reports:
dotenv: $BUILD_ENV
trigger-omnibus:
extends: .rules:omnibus-build
@ -144,7 +128,6 @@ trigger-omnibus:
GITLAB_SHELL_VERSION: $GITLAB_SHELL_VERSION
GITLAB_WORKHORSE_VERSION: $GITLAB_WORKHORSE_VERSION
GITLAB_VERSION: $CI_COMMIT_SHA
GITLAB_ASSETS_TAG: $GITLAB_ASSETS_TAG
IMAGE_TAG: $CI_COMMIT_SHA
TOP_UPSTREAM_SOURCE_PROJECT: $CI_PROJECT_PATH
SECURITY_SOURCES: $SECURITY_SOURCES

View File

@ -74,8 +74,6 @@ e2e:package-and-test:
- build-qa-image
- e2e-test-pipeline-generate
variables:
# This is needed by `trigger-omnibus-env` (`.gitlab/ci/package-and-test/main.gitlab-ci.yml`).
PARENT_PIPELINE_ID: $CI_PIPELINE_ID
SKIP_MESSAGE: Skipping package-and-test due to mr containing only quarantine changes!
RELEASE: "${REGISTRY_HOST}/${REGISTRY_GROUP}/build/omnibus-gitlab-mirror/gitlab-ee:${CI_COMMIT_SHA}"
GITLAB_QA_IMAGE: "${CI_REGISTRY_IMAGE}/gitlab-ee-qa:${CI_COMMIT_SHA}"

View File

@ -34,24 +34,18 @@ review-build-cng-env:
- .review:rules:review-build-cng
image: ${GITLAB_DEPENDENCY_PROXY_ADDRESS}ruby:3.0-alpine3.13
stage: prepare
needs:
# We need this job because we need its `cached-assets-hash.txt` artifact, so that we can pass the assets image tag to the downstream CNG pipeline.
- pipeline: $PARENT_PIPELINE_ID
job: build-assets-image
variables:
BUILD_ENV: build.env
needs: []
before_script:
- source ./scripts/utils.sh
- install_gitlab_gem
script:
- 'ruby -r./scripts/trigger-build.rb -e "puts Trigger.variables_for_env_file(Trigger::CNG.new.variables)" > $BUILD_ENV'
- echo "GITLAB_ASSETS_TAG=$(assets_image_tag)" >> $BUILD_ENV
- cat $BUILD_ENV
- 'ruby -r./scripts/trigger-build.rb -e "puts Trigger.variables_for_env_file(Trigger::CNG.new.variables)" > build.env'
- cat build.env
artifacts:
reports:
dotenv: $BUILD_ENV
dotenv: build.env
paths:
- $BUILD_ENV
- build.env
expire_in: 7 days
when: always

View File

@ -29,8 +29,6 @@ start-review-app-pipeline:
# They need to be explicitly passed on to the child pipeline.
# https://docs.gitlab.com/ee/ci/pipelines/multi_project_pipelines.html#pass-cicd-variables-to-a-downstream-pipeline-by-using-the-variables-keyword
variables:
# This is needed by `review-build-cng-env` (`.gitlab/ci/review-apps/main.gitlab-ci.yml`).
PARENT_PIPELINE_ID: $CI_PIPELINE_ID
SCHEDULE_TYPE: $SCHEDULE_TYPE
DAST_RUN: $DAST_RUN
SKIP_MESSAGE: Skipping review-app due to mr containing only quarantine changes!

View File

@ -1,4 +1,4 @@
# Simple container to store assets for later use
FROM scratch
COPY public/assets /assets/
ADD public/assets /assets/
CMD /bin/true

View File

@ -1,7 +1,10 @@
<script>
import { GlAlert, GlButton, GlEmptyState, GlLink, GlSprintf, GlTable } from '@gitlab/ui';
import { setUrlParams } from '~/lib/utils/url_utility';
import { __ } from '~/locale';
const GOOGLE_CONSOLE_URL = 'https://console.cloud.google.com/iam-admin/serviceaccounts';
export default {
components: { GlAlert, GlButton, GlEmptyState, GlLink, GlSprintf, GlTable },
props: {
@ -40,6 +43,12 @@ export default {
'Enhance security by storing service account keys in secret managers - learn more about %{docLinkStart}secret management with GitLab%{docLinkEnd}',
),
},
methods: {
gcpProjectUrl(id) {
return setUrlParams({ project: id }, GOOGLE_CONSOLE_URL);
},
},
GOOGLE_CONSOLE_URL,
};
</script>
@ -59,6 +68,9 @@ export default {
<p>{{ $options.i18n.serviceAccountsDescription }}</p>
<gl-table :items="list" :fields="$options.tableFields">
<template #cell(gcp_project)="{ value }">
<gl-link :href="gcpProjectUrl(value)">{{ value }}</gl-link>
</template>
<template #cell(service_account_exists)="{ value }">
{{ value ? $options.i18n.found : $options.i18n.notFound }}
</template>

View File

@ -16,6 +16,16 @@
# NotificationService.new.async.new_issue(issue, current_user)
#
class NotificationService
# These should not be called by the MailScheduler::NotificationServiceWorker -
# what would it even mean?
EXCLUDED_ACTIONS = %i[async].freeze
def self.permitted_actions
@permitted_actions ||= gitlab_extensions.flat_map do |klass|
klass.public_instance_methods(false) - EXCLUDED_ACTIONS
end.to_set
end
class Async
attr_reader :parent

View File

@ -18,6 +18,12 @@ module MailScheduler
def perform(meth, *args)
check_arguments!(args)
if ::Feature.enabled?(:verify_mail_scheduler_notification_service_worker_method_names) &&
NotificationService.permitted_actions.exclude?(meth.to_sym)
raise(ArgumentError, "#{meth} not allowed for #{self.class.name}")
end
deserialized_args = ActiveJob::Arguments.deserialize(args)
notification_service.public_send(meth, *deserialized_args) # rubocop:disable GitlabSecurity/PublicSend
rescue ActiveJob::DeserializationError

View File

@ -0,0 +1,8 @@
---
name: verify_mail_scheduler_notification_service_worker_method_names
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/103785
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/371470
milestone: '15.6'
type: development
group: group::scalability
default_enabled: false

View File

@ -35,6 +35,12 @@ module InjectEnterpriseEditionModule
include_mod_with(name) # rubocop: disable Cop/InjectEnterpriseEditionModule
end
def gitlab_extensions
extensions = [self]
each_extension_for(name, Object) { |c| extensions << c }
extensions
end
private
def prepend_module(mod, with_descendants)

View File

@ -27,6 +27,8 @@ metadata:
description: Operations related to the GitLab agent for Kubernetes
- name: clusters
description: Operations related to clusters
- name: container_registry
description: Operations related to container registry
- name: dependency_proxy
description: Operations to manage the dependency proxy for a group
- name: deploy_keys

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
class SanitizeConfidentialNoteTodos < Gitlab::Database::Migration[2.0]
restrict_gitlab_migration gitlab_schema: :gitlab_main
MIGRATION = 'SanitizeConfidentialTodos'
DELAY_INTERVAL = 2.minutes.to_i
BATCH_SIZE = 200
MAX_BATCH_SIZE = 1000
SUB_BATCH_SIZE = 20
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main
def up
queue_batched_background_migration(
MIGRATION,
:notes,
:id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
max_batch_size: MAX_BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE,
gitlab_schema: :gitlab_main
)
end
def down
delete_batched_background_migration(MIGRATION, :notes, :id, [])
end
end

View File

@ -0,0 +1 @@
d0a14750dfcf3bd7641c9f37fbf5f992d4d7be7be33565ed9dd14eb12a983005

View File

@ -185,7 +185,7 @@ Learn how to install, configure, update, and maintain your GitLab instance.
## Git configuration options
- [Server hooks](server_hooks.md): Server hooks (on the file system) for when webhooks aren't enough.
- [Git server hooks](server_hooks.md): Git server hooks (on the file system) for when webhooks aren't enough. Previously called server hooks.
- [Git LFS configuration](lfs/index.md): Learn how to configure LFS for GitLab.
- [Housekeeping](housekeeping.md): Keep your Git repositories tidy and fast.
- [Configuring Git Protocol v2](git_protocol.md): Git protocol version 2 support.

View File

@ -5,17 +5,18 @@ info: To determine the technical writer assigned to the Stage/Group associated w
disqus_identifier: 'https://docs.gitlab.com/ee/administration/custom_hooks.html'
---
# Server hooks **(FREE SELF)**
# Git server hooks **(FREE SELF)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/196051) in GitLab 12.8 replacing Custom Hooks.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/196051) in GitLab 12.8 replacing Custom Hooks.
> - [Renamed](https://gitlab.com/gitlab-org/gitlab/-/issues/372991) from server hooks to Git server hooks in GitLab 15.6.
Server hooks (not to be confused with [system hooks](system_hooks.md) or [file hooks](file_hooks.md)) run custom logic
Git server hooks (not to be confused with [system hooks](system_hooks.md) or [file hooks](file_hooks.md)) run custom logic
on the GitLab server. You can use them to run Git-related tasks such as:
- Enforcing specific commit policies.
- Performing tasks based on the state of the repository.
Server hooks use `pre-receive`, `post-receive`, and `update`
Git server hooks use `pre-receive`, `post-receive`, and `update`
[Git server-side hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks#_server_side_hooks).
GitLab administrators configure server hooks on the file system of the GitLab server. If you don't have file system access,
@ -47,7 +48,7 @@ To create server hooks for a repository:
- To create many server hooks, create a directory for the hooks that matches the hook type. For example, for a
`pre-receive` server hook, the directory name should be `pre-receive.d`. Put the files for the hook in that directory.
1. Make the server hook files executable and ensure that they are owned by the Git user.
1. Write the code to make the server hook function as expected. Server hooks can be in any programming language. Ensure
1. Write the code to make the server hook function as expected. Git server hooks can be in any programming language. Ensure
the [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) at the top reflects the language type. For
example, if the script is in Ruby the shebang is probably `#!/usr/bin/env ruby`.
1. Make the hook file executable, ensure that it's owned by the Git user, and ensure it does not match the backup file
@ -88,7 +89,7 @@ To create a global server hook for all repositories:
1. On the GitLab server, go to the configured global server hook directory.
1. In the configured global server hook directory, create a directory for the hooks that matches the hook type. For example, for a `pre-receive` server hook, the directory name should be `pre-receive.d`.
1. Inside this new directory, add your server hooks. Server hooks can be in any programming language. Ensure the
1. Inside this new directory, add your server hooks. Git server hooks can be in any programming language. Ensure the
[shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) at the top reflects the language type. For example, if the
script is in Ruby the shebang is probably `#!/usr/bin/env ruby`.
1. Make the hook file executable, ensure that it's owned by the Git user, and ensure it does not match the backup file

View File

@ -267,7 +267,7 @@ to use them as normal characters in a match condition.
## Related topics
- [Server hooks](../../../administration/server_hooks.md), to create complex custom push rules
- [Git server hooks](../../../administration/server_hooks.md) (previously called server hooks), to create complex custom push rules
- [Signing commits with GPG](gpg_signed_commits/index.md)
- [Protected branches](../protected_branches.md)

View File

@ -208,6 +208,7 @@ module API
mount ::API::GoProxy
mount ::API::GroupAvatar
mount ::API::GroupClusters
mount ::API::GroupContainerRepositories
mount ::API::GroupExport
mount ::API::GroupImport
mount ::API::GroupPackages
@ -282,7 +283,6 @@ module API
mount ::API::Events
mount ::API::GenericPackages
mount ::API::GroupBoards
mount ::API::GroupContainerRepositories
mount ::API::GroupDebianDistributions
mount ::API::GroupLabels
mount ::API::GroupMilestones

View File

@ -12,13 +12,13 @@ module API
class Repository < Grape::Entity
include ::API::Helpers::RelatedResourcesHelpers
expose :id
expose :name
expose :path
expose :project_id
expose :location
expose :created_at
expose :expiration_policy_started_at, as: :cleanup_policy_started_at
expose :id, documentation: { type: 'integer', example: 1 }
expose :name, documentation: { type: 'string', example: 'releases' }
expose :path, documentation: { type: 'string', example: 'group/project/releases' }
expose :project_id, documentation: { type: 'integer', example: 9 }
expose :location, documentation: { type: 'string', example: 'gitlab.example.com/group/project/releases' }
expose :created_at, documentation: { type: 'dateTime', example: '2019-01-10T13:39:08.229Z' }
expose :expiration_policy_started_at, as: :cleanup_policy_started_at, documentation: { type: 'dateTime', example: '2020-08-17T03:12:35.489Z' }
expose :tags_count, if: -> (_, options) { options[:tags_count] }
expose :tags, using: Tag, if: -> (_, options) { options[:tags] }
expose :delete_api_path, if: ->(object, options) { Ability.allowed?(options[:user], :admin_container_image, object) }

View File

@ -16,12 +16,19 @@ module API
tag_name: API::NO_SLASH_URL_PART_REGEX)
params do
requires :id, type: String, desc: "Group's ID or path"
requires :id, types: [String, Integer],
desc: 'The ID or URL-encoded path of the group accessible by the authenticated user'
end
resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
desc 'Get a list of all repositories within a group' do
detail 'This feature was introduced in GitLab 12.2.'
desc 'List registry repositories within a group' do
detail 'Get a list of registry repositories in a group. This feature was introduced in GitLab 12.2.'
success Entities::ContainerRegistry::Repository
failure [
{ code: 401, message: 'Unauthorized' },
{ code: 404, message: 'Group Not Found' }
]
is_array true
tags %w[container_registry]
end
params do
use :pagination

View File

@ -0,0 +1,52 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Iterates through confidential notes and removes any of its todos if the user
# cannot read the note
#
# Warning: This migration is not properly isolated. The reason for this is
# that we need to check permission for notes and it would be difficult
# to extract all related logic.
# Details in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87908#note_952459215
class SanitizeConfidentialTodos < BatchedMigrationJob
scope_to ->(relation) { relation.where(confidential: true) }
operation_name :delete_invalid_todos
def perform
each_sub_batch do |sub_batch|
delete_ids = invalid_todo_ids(sub_batch)
Todo.where(id: delete_ids).delete_all if delete_ids.present?
end
end
private
def invalid_todo_ids(notes_batch)
todos = Todo.where(note_id: notes_batch.select(:id)).includes(:note, :user)
todos.each_with_object([]) do |todo, ids|
ids << todo.id if invalid_todo?(todo)
end
end
def invalid_todo?(todo)
return false unless todo.note
return false if Ability.allowed?(todo.user, :read_todo, todo)
logger.info(
message: "#{self.class.name} deleting invalid todo",
attributes: todo.attributes
)
true
end
def logger
@logger ||= Gitlab::BackgroundMigration::Logger.build
end
end
end
end

View File

@ -1,82 +1,36 @@
#!/bin/sh
. scripts/utils.sh
# Exit early if we don't want to build the image
if [ "${BUILD_ASSETS_IMAGE}" != "true" ]
if [[ "${BUILD_ASSETS_IMAGE}" != "true" ]]
then
exit 0
fi
get_repository_id() {
repository_name="${1}"
repositories_url="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/registry/repositories"
curl --header "PRIVATE-TOKEN: ${PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE}" "${repositories_url}" | jq "map(select(.name == \"${repository_name}\")) | .[0].id"
}
# Generate the image name based on the project this is being run in
ASSETS_IMAGE_NAME="gitlab-assets-ce"
# `dev.gitlab-org` still has gitlab-ee.
if [ "${CI_PROJECT_NAME}" = "gitlab" ] || [ "${CI_PROJECT_NAME}" = "gitlab-ee" ]
if [[ "${CI_PROJECT_NAME}" == "gitlab" ]] || [[ "${CI_PROJECT_NAME}" == "gitlab-ee" ]]
then
ASSETS_IMAGE_NAME="gitlab-assets-ee"
fi
ASSETS_IMAGE_PATH="${CI_REGISTRY}/${CI_PROJECT_PATH}/${ASSETS_IMAGE_NAME}"
COMMIT_ASSETS_HASH_TAG="$(assets_image_tag)"
COMMIT_ASSETS_HASH_DESTINATION="${ASSETS_IMAGE_PATH}:${COMMIT_ASSETS_HASH_TAG}"
DESTINATIONS="--destination=${COMMIT_ASSETS_HASH_DESTINATION}"
SKIP_ASSETS_IMAGE_BUILDING_IF_ALREADY_EXIST="true"
# Also tag the image with GitLab version, if running on a tag pipeline
# (and thus skip the performance optimization in that case), for back-compatibility.
if [ -n "${CI_COMMIT_TAG}" ]; then
COMMIT_REF_NAME_DESTINATION="${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_NAME}"
DESTINATIONS="$DESTINATIONS --destination=$COMMIT_REF_NAME_DESTINATION"
SKIP_ASSETS_IMAGE_BUILDING_IF_ALREADY_EXIST="false"
fi
# The auto-deploy branch process still fetch assets image tagged with $CI_COMMIT_SHA,
# so we need to push the image with it (and thus skip the performance optimization in that case),
# for back-compatibility.
if echo "${CI_COMMIT_BRANCH}" | grep -Eq "^[0-9]+-[0-9]+-auto-deploy-[0-9]+$"; then
COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_SHA}
DESTINATIONS="$DESTINATIONS --destination=$COMMIT_SHA_DESTINATION"
SKIP_ASSETS_IMAGE_BUILDING_IF_ALREADY_EXIST="false"
fi
if [ "${SKIP_ASSETS_IMAGE_BUILDING_IF_ALREADY_EXIST}" = "true" ] && [ -n "${PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE}" ]; then
echoinfo "Checking if the ${COMMIT_ASSETS_HASH_DESTINATION} image exists..."
repository_id=$(get_repository_id "${ASSETS_IMAGE_NAME}")
if [ -n "${repository_id}" ]; then
api_image_url="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/registry/repositories/${repository_id}/tags/${COMMIT_ASSETS_HASH_TAG}"
echoinfo "api_image_url: ${api_image_url}"
if test_url "${api_image_url}" "--header \"PRIVATE-TOKEN: ${PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE}\""; then
echosuccess "Image ${COMMIT_ASSETS_HASH_DESTINATION} already exists, no need to rebuild it."
exit 0
else
echoinfo "Image ${COMMIT_ASSETS_HASH_DESTINATION} doesn't exist, we'll need to build it."
fi
else
echoerr "Repository ID couldn't be found for the '${ASSETS_IMAGE_NAME}' image!"
fi
else
echoinfo "The 'PROJECT_TOKEN_FOR_CI_SCRIPTS_API_USAGE' variable is not present, so we cannot check if the image already exists."
fi
ASSETS_IMAGE_PATH=${CI_REGISTRY}/${CI_PROJECT_PATH}/${ASSETS_IMAGE_NAME}
mkdir -p assets_container.build/public
cp -r public/assets assets_container.build/public/
cp Dockerfile.assets assets_container.build/
echo "Building assets image for destinations: ${DESTINATIONS}"
COMMIT_REF_SLUG_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_SLUG}
/kaniko/executor \
--context="assets_container.build" \
--dockerfile="assets_container.build/Dockerfile.assets" \
${DESTINATIONS}
COMMIT_SHA_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_SHA}
COMMIT_REF_NAME_DESTINATION=${ASSETS_IMAGE_PATH}:${CI_COMMIT_REF_NAME}
DESTINATIONS="--destination=$COMMIT_REF_SLUG_DESTINATION --destination=$COMMIT_SHA_DESTINATION"
# Also tag the image with GitLab version, if running on a tag pipeline, so
# other projects can simply use that instead of computing the slug.
if [ -n "$CI_COMMIT_TAG" ]; then
DESTINATIONS="$DESTINATIONS --destination=$COMMIT_REF_NAME_DESTINATION"
fi
echo "building assets image for destinations: $DESTINATIONS"
/kaniko/executor --context=assets_container.build --dockerfile=assets_container.build/Dockerfile.assets $DESTINATIONS

View File

@ -160,8 +160,6 @@ module Trigger
end
class CNG < Base
ASSETS_HASH = "cached-assets-hash.txt"
def variables
# Delete variables that aren't useful when using native triggers.
super.tap do |hash|
@ -189,6 +187,7 @@ module Trigger
"TRIGGER_BRANCH" => ref,
"GITLAB_VERSION" => ENV['CI_COMMIT_SHA'],
"GITLAB_TAG" => ENV['CI_COMMIT_TAG'], # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
"GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_SHA'],
"FORCE_RAILS_IMAGE_BUILDS" => 'true',
"CE_PIPELINE" => Trigger.ee? ? nil : "true", # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
"EE_PIPELINE" => Trigger.ee? ? "true" : nil # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.

View File

@ -15,11 +15,9 @@ function retry() {
function test_url() {
local url="${1}"
local curl_args="${2}"
local status
local cmd="curl ${curl_args} --output /dev/null -L -s -w ''%{http_code}'' \"${url}\""
status=$(eval "${cmd}")
status=$(curl --output /dev/null -L -s -w ''%{http_code}'' "${url}")
if [[ $status == "200" ]]; then
return 0
@ -205,16 +203,3 @@ function danger_as_local() {
# We need to base SHA to help danger determine the base commit for this shallow clone.
bundle exec danger dry_run --fail-on-errors=true --verbose --base="${CI_MERGE_REQUEST_DIFF_BASE_SHA}" --head="${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-$CI_COMMIT_SHA}" --dangerfile="${DANGER_DANGERFILE:-Dangerfile}"
}
# We're inlining this function in `.gitlab/ci/package-and-test/main.gitlab-ci.yml` since this file can be included in other projects.
function assets_image_tag() {
local cache_assets_hash_file="cached-assets-hash.txt"
if [[ -n "${CI_COMMIT_TAG}" ]]; then
echo -n "${CI_COMMIT_REF_NAME}"
elif [[ -f "${cache_assets_hash_file}" ]]; then
echo -n "assets-hash-$(cat ${cache_assets_hash_file} | cut -c1-10)"
else
echo -n "${CI_COMMIT_SHA}"
fi
}

View File

@ -126,4 +126,22 @@ RSpec.describe InjectEnterpriseEditionModule do
describe '#include_mod' do
it_behaves_like 'expand the assumed extension with', :include
end
describe '#gitlab_extensions' do
context 'when there are no extension modules' do
it 'returns the class itself' do
expect(fish_class.gitlab_extensions).to contain_exactly(fish_class)
end
end
context 'when there are extension modules' do
it 'returns the class itself and any extensions' do
stub_const(extension_name, extension_namespace)
extension_namespace.const_set(fish_name, fish_extension)
fish_class.prepend_mod
expect(fish_class.gitlab_extensions).to contain_exactly(fish_class, fish_extension)
end
end
end
end

View File

@ -31,7 +31,8 @@ RSpec.describe 'Broadcast Messages' do
expect(page).not_to have_content 'SampleMessage'
end
it 'broadcast message is still hidden after refresh', :js do
it 'broadcast message is still hidden after refresh', :js,
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/347118' do
visit root_path
find('.js-dismiss-current-broadcast-notification').click

View File

@ -10,7 +10,8 @@ RSpec.describe 'top nav tooltips', :js do
visit explore_projects_path
end
it 'clicking new dropdown hides tooltip', :aggregate_failures do
it 'clicking new dropdown hides tooltip', :aggregate_failures,
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/382786' do
btn = '#js-onboarding-new-project-link'
page.find(btn).hover

View File

@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
import { GlAlert, GlButton, GlEmptyState, GlTable } from '@gitlab/ui';
import { GlAlert, GlButton, GlEmptyState, GlLink, GlTable } from '@gitlab/ui';
import ServiceAccountsList from '~/google_cloud/service_accounts/list.vue';
describe('google_cloud/service_accounts/list', () => {
@ -45,7 +45,26 @@ describe('google_cloud/service_accounts/list', () => {
beforeEach(() => {
const propsData = {
list: [{}, {}, {}],
list: [
{
ref: '*',
gcp_project: 'gcp-project-123',
service_account_exists: true,
service_account_key_exists: true,
},
{
ref: 'prod',
gcp_project: 'gcp-project-456',
service_account_exists: true,
service_account_key_exists: true,
},
{
ref: 'stag',
gcp_project: 'gcp-project-789',
service_account_exists: true,
service_account_key_exists: true,
},
],
createUrl: '#create-url',
emptyIllustrationUrl: '#empty-illustration-url',
};
@ -68,6 +87,12 @@ describe('google_cloud/service_accounts/list', () => {
expect(findRows().length).toBe(4);
});
it('table row must contain link to the google cloud console', () => {
expect(findRows().at(1).findComponent(GlLink).attributes('href')).toBe(
`${ServiceAccountsList.GOOGLE_CONSOLE_URL}?project=gcp-project-123`,
);
});
it('shows the link to create new service accounts', () => {
const button = findButton();
expect(button.exists()).toBe(true);

View File

@ -0,0 +1,89 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::SanitizeConfidentialTodos, :migration, schema: 20221110045406 do
let(:todos) { table(:todos) }
let(:notes) { table(:notes) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:project_features) { table(:project_features) }
let(:users) { table(:users) }
let(:issues) { table(:issues) }
let(:members) { table(:members) }
let(:project_authorizations) { table(:project_authorizations) }
let(:user) { users.create!(first_name: 'Test', last_name: 'User', email: 'test@user.com', projects_limit: 1) }
let(:project_namespace1) { namespaces.create!(path: 'pns1', name: 'pns1') }
let(:project_namespace2) { namespaces.create!(path: 'pns2', name: 'pns2') }
let(:project1) do
projects.create!(namespace_id: project_namespace1.id,
project_namespace_id: project_namespace1.id, visibility_level: 20)
end
let(:project2) do
projects.create!(namespace_id: project_namespace2.id,
project_namespace_id: project_namespace2.id)
end
let(:issue1) { issues.create!(project_id: project1.id, issue_type: 1, title: 'issue1', author_id: user.id) }
let(:issue2) { issues.create!(project_id: project2.id, issue_type: 1, title: 'issue2') }
let(:public_note) { notes.create!(note: 'text', project_id: project1.id) }
let(:confidential_note) do
notes.create!(note: 'text', project_id: project1.id, confidential: true,
noteable_id: issue1.id, noteable_type: 'Issue')
end
let(:other_confidential_note) do
notes.create!(note: 'text', project_id: project2.id, confidential: true,
noteable_id: issue2.id, noteable_type: 'Issue')
end
let(:common_params) { { user_id: user.id, author_id: user.id, action: 1, state: 'pending', target_type: 'Note' } }
let!(:ignored_todo1) { todos.create!(**common_params) }
let!(:ignored_todo2) { todos.create!(**common_params, target_id: public_note.id, note_id: public_note.id) }
let!(:valid_todo) { todos.create!(**common_params, target_id: confidential_note.id, note_id: confidential_note.id) }
let!(:invalid_todo) do
todos.create!(**common_params, target_id: other_confidential_note.id, note_id: other_confidential_note.id)
end
describe '#perform' do
before do
project_features.create!(project_id: project1.id, issues_access_level: 20, pages_access_level: 20)
members.create!(state: 0, source_id: project1.id, source_type: 'Project',
type: 'ProjectMember', user_id: user.id, access_level: 50, notification_level: 0,
member_namespace_id: project_namespace1.id)
project_authorizations.create!(project_id: project1.id, user_id: user.id, access_level: 50)
end
subject(:perform) do
described_class.new(
start_id: notes.minimum(:id),
end_id: notes.maximum(:id),
batch_table: :notes,
batch_column: :id,
sub_batch_size: 1,
pause_ms: 0,
connection: ApplicationRecord.connection
).perform
end
it 'deletes todos where user can not read its note and logs deletion', :aggregate_failures do
expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
expect(logger).to receive(:info).with(
hash_including(
message: "#{described_class.name} deleting invalid todo",
attributes: hash_including(invalid_todo.attributes.slice(:id, :user_id, :target_id, :target_type))
)
).once
end
expect { perform }.to change(todos, :count).by(-1)
expect(todos.all).to match_array([ignored_todo1, ignored_todo2, valid_todo])
end
end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe SanitizeConfidentialNoteTodos do
let(:migration) { described_class::MIGRATION }
describe '#up' do
it 'schedules a batched background migration' do
migrate!
expect(migration).to have_scheduled_batched_migration(
table_name: :notes,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
max_batch_size: described_class::MAX_BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
end
end
describe '#down' do
it 'deletes all batched migration records' do
migrate!
schema_migrate_down!
expect(migration).not_to have_scheduled_batched_migration
end
end
end

View File

@ -57,5 +57,13 @@ RSpec.describe API::GroupContainerRepositories do
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'with URL-encoded path of the group' do
let(:url) { "/groups/#{group.full_path}/registry/repositories" }
it_behaves_like 'rejected container repository access', :guest, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
it_behaves_like 'returns repositories for allowed users', :reporter
end
end
end

View File

@ -319,6 +319,28 @@ RSpec.describe Trigger do
end
end
describe "GITLAB_ASSETS_TAG" do
context 'when CI_COMMIT_TAG is set' do
before do
stub_env('CI_COMMIT_TAG', 'v1.0')
end
it 'sets GITLAB_ASSETS_TAG to CI_COMMIT_REF_NAME' do
expect(subject.variables['GITLAB_ASSETS_TAG']).to eq(env['CI_COMMIT_REF_NAME'])
end
end
context 'when CI_COMMIT_TAG is nil' do
before do
stub_env('CI_COMMIT_TAG', nil)
end
it 'sets GITLAB_ASSETS_TAG to CI_COMMIT_SHA' do
expect(subject.variables['GITLAB_ASSETS_TAG']).to eq(env['CI_COMMIT_SHA'])
end
end
end
describe "CE_PIPELINE" do
context 'when Trigger.ee? is true' do
before do

View File

@ -211,6 +211,23 @@ RSpec.describe NotificationService, :mailer do
it_behaves_like 'participating by assignee notification'
end
describe '.permitted_actions' do
it 'includes public methods' do
expect(described_class.permitted_actions).to include(:access_token_created)
end
it 'excludes EXCLUDED_ACTIONS' do
described_class::EXCLUDED_ACTIONS.each do |action|
expect(described_class.permitted_actions).not_to include(action)
end
end
it 'excludes protected and private methods' do
expect(described_class.permitted_actions).not_to include(:new_resource_email)
expect(described_class.permitted_actions).not_to include(:approve_mr_email)
end
end
describe '#async' do
let(:async) { notification.async }

View File

@ -42,9 +42,42 @@ RSpec.describe MailScheduler::NotificationServiceWorker do
end
end
context 'when the method is not a public method' do
it 'raises NoMethodError' do
expect { worker.perform('notifiable?', *serialize(key)) }.to raise_error(NoMethodError)
context 'when the method is allowed' do
it 'calls the method on NotificationService' do
NotificationService.permitted_actions.each do |action|
expect(worker.notification_service).to receive(action).with(key)
worker.perform(action, *serialize(key))
end
end
end
context 'when the method is not allowed' do
context 'when verify_mail_scheduler_notification_service_worker_method_names is enabled' do
it 'raises ArgumentError' do
expect(worker.notification_service).not_to receive(:async)
expect(worker.notification_service).not_to receive(:foo)
expect { worker.perform('async', *serialize(key)) }
.to raise_error(ArgumentError, 'async not allowed for MailScheduler::NotificationServiceWorker')
expect { worker.perform('foo', *serialize(key)) }
.to raise_error(ArgumentError, 'foo not allowed for MailScheduler::NotificationServiceWorker')
end
end
context 'when verify_mail_scheduler_notification_service_worker_method_names is disabled' do
before do
stub_feature_flags(verify_mail_scheduler_notification_service_worker_method_names: false)
end
it 'forwards the argument to the service' do
expect(worker.notification_service).to receive(:async)
expect(worker.notification_service).to receive(:foo)
worker.perform('async', *serialize(key))
worker.perform('foo', *serialize(key))
end
end
end
end