Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2021-08-03 21:09:39 +00:00
parent 4905885126
commit 4bf395cded
122 changed files with 1913 additions and 493 deletions

View File

@ -446,9 +446,10 @@ Graphql/OldTypes:
Include:
- 'app/graphql/**/*'
- 'ee/app/graphql/**/*'
Exclude:
- 'spec/**/*.rb'
- 'ee/spec/**/*.rb'
- 'spec/graphql/**/*'
- 'spec/requests/api/graphql/**/*'
- 'ee/spec/graphql/**/*'
- 'ee/spec/requests/api/graphql/**/*'
RSpec/EnvAssignment:
Enable: true

View File

@ -23,23 +23,6 @@ Graphql/Descriptions:
- 'ee/app/graphql/types/vulnerability_severity_enum.rb'
- 'ee/app/graphql/types/vulnerability_state_enum.rb'
# WIP See https://gitlab.com/gitlab-org/gitlab/-/issues/336292
Graphql/OldTypes:
Exclude:
- 'spec/**/*.rb'
- 'ee/spec/**/*.rb'
- 'ee/app/graphql/ee/mutations/ci/ci_cd_settings_update.rb'
- 'ee/app/graphql/ee/resolvers/issues_resolver.rb'
- 'ee/app/graphql/ee/resolvers/namespace_projects_resolver.rb'
- 'ee/app/graphql/ee/types/board_list_type.rb'
- 'ee/app/graphql/ee/types/boards/board_issue_input_base_type.rb'
- 'ee/app/graphql/ee/types/issue_connection_type.rb'
- 'ee/app/graphql/ee/types/issue_type.rb'
- 'ee/app/graphql/ee/types/issues/negated_issue_filter_input_type.rb'
- 'ee/app/graphql/ee/types/merge_request_type.rb'
- 'ee/app/graphql/ee/types/namespace_type.rb'
- 'ee/app/graphql/ee/types/project_type.rb'
# WIP: See https://gitlab.com/gitlab-org/gitlab/-/issues/220040
Rails/SaveBang:
Exclude:

View File

@ -2,6 +2,30 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 14.1.2 (2021-08-03)
### Security (19 changes)
- [Add project member validation for domain limitation](gitlab-org/security/gitlab@d17016dde463811c81a22c07aeab817ff7b5757c) ([merge request](gitlab-org/security/gitlab!1564))
- [Hide project-level CI/CD Analytics for Guests](gitlab-org/security/gitlab@ce3b41daadd795e906b5bbbec424a494c491a1d4) ([merge request](gitlab-org/security/gitlab!1600))
- [Only allow invite to be accepted by user with matching email](gitlab-org/security/gitlab@9d9e439c6a923fa4791a056e599c7b7e76de59a1) ([merge request](gitlab-org/security/gitlab!1632))
- [Add html escaping for default branch name](gitlab-org/security/gitlab@549101007452bd43d866d314b1c787120cfcb36a) ([merge request](gitlab-org/security/gitlab!1630))
- [Configure OmniAuth to use GitLab AppLogger](gitlab-org/security/gitlab@0b234f0058bbaa0415ab43182761757c332764d1) ([merge request](gitlab-org/security/gitlab!1615))
- [Add permissions check to pipelines#show action](gitlab-org/security/gitlab@6901d52d5265d126419e78848344ae9a886ee1a7) ([merge request](gitlab-org/security/gitlab!1612))
- [Prevent impersonation in gitlab-shell SSH certs](gitlab-org/security/gitlab@82a878ba276c6500af5aa3d951819240535127de) ([merge request](gitlab-org/security/gitlab!1609))
- [Fix Protected Environment Accesses Cleanup](gitlab-org/security/gitlab@0c954547dbdee6a47fc755eebef0882852080579) ([merge request](gitlab-org/security/gitlab!1606)) **GitLab Enterprise Edition**
- [Use oauth_app id instead of uid](gitlab-org/security/gitlab@9c49cbbbc730eb16ef109c1f1fc1b167768d5dd3) ([merge request](gitlab-org/security/gitlab!1603)) **GitLab Enterprise Edition**
- [Block impersonation token use if it is not permitted](gitlab-org/security/gitlab@1a73b228549dfe1fe98f44a8cee8e3ebcc36d841) ([merge request](gitlab-org/security/gitlab!1583))
- [Fix XSS in Mermaid Markdown rendering](gitlab-org/security/gitlab@6bff57b10739c42d177371dbf44143d92de1e595) ([merge request](gitlab-org/security/gitlab!1488))
- [Do not show email address in error message](gitlab-org/security/gitlab@fdee78b193d9744253c7b7d671247cc50175c643) ([merge request](gitlab-org/security/gitlab!1596)) **GitLab Enterprise Edition**
- [Updates oauth to 0.5.6](gitlab-org/security/gitlab@bfa3de880659b0156cf8c2a7085b1705596380a4) ([merge request](gitlab-org/security/gitlab!1592))
- [Fix tag ref detection for pipelines](gitlab-org/security/gitlab@87a03ffd263ad153a911e14512cb7776b98a435d) ([merge request](gitlab-org/security/gitlab!1591))
- [Disallow non-members to set issue metadata on issue create](gitlab-org/security/gitlab@abe9d660ce3314c1540ec20b3a0640e623c56ecc) ([merge request](gitlab-org/security/gitlab!1586))
- [Prevent guests from linking issues with errors](gitlab-org/security/gitlab@4a74667407b725176c4722e86bba3f942ffc9487) ([merge request](gitlab-org/security/gitlab!1587))
- [Filter todos whose target users no longer have access to](gitlab-org/security/gitlab@a05dd90c43ae84bb37956217d9cf4effd1edae50) ([merge request](gitlab-org/security/gitlab!1556))
- [Remove impersonation token from api response for non-admin user](gitlab-org/security/gitlab@928eaf1b82d45fbfa0d82b6515d192453b944ab9) ([merge request](gitlab-org/security/gitlab!1565))
- [Restrict access to instance-level security features for reporters](gitlab-org/security/gitlab@d4097341cede050e0066fa2a5445cbf51a1cc1bd) ([merge request](gitlab-org/security/gitlab!1561)) **GitLab Enterprise Edition**
## 14.1.1 (2021-07-28)
### Added (1 change)
@ -585,6 +609,29 @@ entry.
- [Remove diffs gradual load feature flag](gitlab-org/gitlab@027d7c4327b5b6205a84281239027273517bf81b) ([merge request](gitlab-org/gitlab!55478))
- [Remove partial index for Hashed Storage migration](gitlab-org/gitlab@3ed017a1023d7b0941a7606b69e6caee8d22f15c) ([merge request](gitlab-org/gitlab!62920))
## 14.0.7 (2021-08-03)
### Security (18 changes)
- [Add project member validation for domain limitation](gitlab-org/security/gitlab@f9a0e78111cbbfe93b6f8ca27bd9f064e146d005) ([merge request](gitlab-org/security/gitlab!1563))
- [Hide project-level CI/CD Analytics for Guests](gitlab-org/security/gitlab@56a17ae80c1f179bcdf939d6b8e71737f9501949) ([merge request](gitlab-org/security/gitlab!1574))
- [Only allow invite to be accepted by user with matching email](gitlab-org/security/gitlab@a79d0e6dbbc32247c10c4928a04f0149071eb5fe) ([merge request](gitlab-org/security/gitlab!1633))
- [Add html escaping for default branch name](gitlab-org/security/gitlab@d26f0c4d5ef386100d40e92f815b7e754fccacc3) ([merge request](gitlab-org/security/gitlab!1631))
- [Configure OmniAuth to use GitLab AppLogger](gitlab-org/security/gitlab@dfcff90cb86fac0dff05d8bd5f25f46da2cc8ce0) ([merge request](gitlab-org/security/gitlab!1616))
- [Add permissions check to pipelines#show action](gitlab-org/security/gitlab@c611a8154dc5776a0767b4153ff8963d46e7f39a) ([merge request](gitlab-org/security/gitlab!1613))
- [Prevent impersonation in gitlab-shell SSH certs](gitlab-org/security/gitlab@320457b16cbfd5dec4e05937c4d61b96aba4c290) ([merge request](gitlab-org/security/gitlab!1610))
- [Fix Protected Environment Accesses Cleanup](gitlab-org/security/gitlab@99846cdeda6acf6223fb0ee5364e375765d3cbb1) ([merge request](gitlab-org/security/gitlab!1607)) **GitLab Enterprise Edition**
- [Do not show email address in error message](gitlab-org/security/gitlab@5c4adf419e38f0fd9d540d2f7cd9d14888bc6b96) ([merge request](gitlab-org/security/gitlab!1597)) **GitLab Enterprise Edition**
- [Disallow non-members to set issue metadata on issue create](gitlab-org/security/gitlab@0bb4499e5f4514beb647d0e6ac3f9b15720c42ce) ([merge request](gitlab-org/security/gitlab!1581))
- [Prevent guests from linking issues with errors](gitlab-org/security/gitlab@94462a56e9490ddd85ec7d1d869b6fda2042fb99) ([merge request](gitlab-org/security/gitlab!1588))
- [Block impersonation token use if it is not permitted](gitlab-org/security/gitlab@31b8bc506dd89a576a2cda094c711c22be764398) ([merge request](gitlab-org/security/gitlab!1584))
- [Updates oauth to 0.5.6](gitlab-org/security/gitlab@c839b6107c41bcd02e048d0ae0499c140bfbec1c) ([merge request](gitlab-org/security/gitlab!1568))
- [Remove impersonation token from api response for non-admin user](gitlab-org/security/gitlab@845dc284cc8ee8736e4f65740d61ffeb197f7b7c) ([merge request](gitlab-org/security/gitlab!1566))
- [Filter todos whose target users no longer have access to](gitlab-org/security/gitlab@a6c81e5cadb277f80d1b9565700f8b1f201cfb05) ([merge request](gitlab-org/security/gitlab!1554))
- [Fix tag ref detection for pipelines](gitlab-org/security/gitlab@cd5f61dc50c44d69896b38f3bd44129a8f1f01d8) ([merge request](gitlab-org/security/gitlab!1548))
- [Restrict access to instance-level security features for reporters](gitlab-org/security/gitlab@c8a75e8032c68065524a85f7030960b614a915bd) ([merge request](gitlab-org/security/gitlab!1539)) **GitLab Enterprise Edition**
- [Fix XSS in Mermaid Markdown rendering](gitlab-org/security/gitlab@86139e79c13cf87183cdec9f84ec114cdfc6d215) ([merge request](gitlab-org/security/gitlab!1489))
## 14.0.6 (2021-07-20)
### Fixed (4 changes)
@ -1295,6 +1342,26 @@ entry.
- [Add missing metrics information](gitlab-org/gitlab@89cd7fe3b95323e635b2d73e08549b2e6153dc4d) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/61772/edit))
- [Track usage of the resolve UI](gitlab-org/gitlab@35c8e30fce288cecefcf2f7c0077d4608e696519) ([merge request](gitlab-org/gitlab!61654))
## 13.12.9 (2021-08-03)
### Security (15 changes)
- [Add project member validation for domain limitation](gitlab-org/security/gitlab@8aff1815f897c2c454c87b1ccdd98c7a2c9eedb3) ([merge request](gitlab-org/security/gitlab!1562))
- [Block impersonation token use if it is not permitted](gitlab-org/security/gitlab@99ab170ae5a2d991600dec9e7dfd8b5ca502c437) ([merge request](gitlab-org/security/gitlab!1585))
- [Hide project-level CI/CD Analytics for Guests](gitlab-org/security/gitlab@740395d9663be41d52d831b8f90e271c08137220) ([merge request](gitlab-org/security/gitlab!1575))
- [Only allow invite to be accepted by user with matching email](gitlab-org/security/gitlab@ae7ade09920486f6124496d800bf5f63f5a909eb) ([merge request](gitlab-org/security/gitlab!1634))
- [Configure OmniAuth to use GitLab AppLogger](gitlab-org/security/gitlab@ed5e7742173878e59d760744e3f4f6686268584b) ([merge request](gitlab-org/security/gitlab!1617))
- [Fix Protected Environment Accesses Cleanup](gitlab-org/security/gitlab@79eb0cb13a35864267c30663fd6033e8c6224cac) ([merge request](gitlab-org/security/gitlab!1608)) **GitLab Enterprise Edition**
- [Add permissions check to pipelines#show action](gitlab-org/security/gitlab@1a293b409226ce743527f1ac5ac5d216998339e1) ([merge request](gitlab-org/security/gitlab!1618))
- [Prevent impersonation in gitlab-shell SSH certs](gitlab-org/security/gitlab@42521d9e7e72047bac09bd42779203ae6e508227) ([merge request](gitlab-org/security/gitlab!1611))
- [Prevent guests from linking issues with errors](gitlab-org/security/gitlab@da799b0c7bcade058d4b57e065b1a1bebf903fa3) ([merge request](gitlab-org/security/gitlab!1599))
- [Do not show email address in error message](gitlab-org/security/gitlab@2c3318edaa39ed0837b8fb30acae9f2cdc3d158f) ([merge request](gitlab-org/security/gitlab!1598)) **GitLab Enterprise Edition**
- [Updates oauth to 0.5.6](gitlab-org/security/gitlab@33df3791b646026016303a9d64661fbee7563630) ([merge request](gitlab-org/security/gitlab!1569))
- [Remove impersonation token from api response for non-admin user](gitlab-org/security/gitlab@b56ae1953b2cd6b9d12c584e0f2c298a931f6f08) ([merge request](gitlab-org/security/gitlab!1567))
- [Filter todos whose target users no longer have access to](gitlab-org/security/gitlab@ba613574b12e40fb61e5fbae8b1159f9ad037e84) ([merge request](gitlab-org/security/gitlab!1555))
- [Fix tag ref detection for pipelines](gitlab-org/security/gitlab@4c36e98bcecd6e42e23ec5e20443f41de7f5bf18) ([merge request](gitlab-org/security/gitlab!1549))
- [Fix XSS in Mermaid Markdown rendering](gitlab-org/security/gitlab@b27425816723b53db2f65b39f4702711b858cdfc) ([merge request](gitlab-org/security/gitlab!1487))
## 13.12.8 (2021-07-07)
### Security (1 change)

View File

@ -1 +1 @@
13.19.0
13.19.1

View File

@ -794,7 +794,7 @@ GEM
nenv (~> 0.1)
shellany (~> 0.0)
numerizer (0.2.0)
oauth (0.5.4)
oauth (0.5.6)
oauth2 (1.4.7)
faraday (>= 0.8, < 2.0)
jwt (>= 1.0, < 3.0)

View File

@ -66,6 +66,7 @@ export function initMermaid(mermaid) {
useMaxWidth: true,
htmlLabels: true,
},
secure: ['secure', 'securityLevel', 'startOnLoad', 'maxTextSize', 'htmlLabels'],
securityLevel: 'strict',
});

View File

@ -36,7 +36,7 @@
}
.with-performance-bar .whats-new-drawer {
margin-top: calc(#{$performance-bar-height} + #{$header-height});
margin-top: $performance-bar-height + $header-height;
}
.with-system-header .whats-new-drawer {

View File

@ -2,6 +2,7 @@
class Admin::ImpersonationTokensController < Admin::ApplicationController
before_action :user
before_action :verify_impersonation_enabled!
feature_category :authentication_and_authorization
@ -41,6 +42,10 @@ class Admin::ImpersonationTokensController < Admin::ApplicationController
end
# rubocop: enable CodeReuse/ActiveRecord
def verify_impersonation_enabled!
access_denied! unless helpers.impersonation_enabled?
end
def finder(options = {})
PersonalAccessTokensFinder.new({ user: user, impersonation: true }.merge(options))
end

View File

@ -16,6 +16,8 @@ class Dashboard::TodosController < Dashboard::ApplicationController
@todos = @todos.with_entity_associations
return if redirect_out_of_range(@todos, todos_page_count(@todos))
@allowed_todos = ::Todos::AllowedTargetFilterService.new(@todos, current_user).execute
end
def destroy

View File

@ -20,7 +20,7 @@ class InvitesController < ApplicationController
end
def accept
if member.accept_invite!(current_user)
if current_user_matches_invite? && member.accept_invite!(current_user)
redirect_to invite_details[:path], notice: helpers.invite_accepted_notice(member)
else
redirect_back_or_default(options: { alert: _("The invitation could not be accepted.") })
@ -52,7 +52,7 @@ class InvitesController < ApplicationController
end
def current_user_matches_invite?
@member.invite_email == current_user.email
current_user.verified_emails.include?(@member.invite_email)
end
def member?

View File

@ -8,8 +8,8 @@ class Projects::PipelinesController < Projects::ApplicationController
before_action :pipeline, except: [:index, :new, :create, :charts, :config_variables]
before_action :set_pipeline_path, only: [:show]
before_action :authorize_read_pipeline!
before_action :authorize_read_build!, only: [:index]
before_action :authorize_read_analytics!, only: [:charts]
before_action :authorize_read_build!, only: [:index, :show]
before_action :authorize_read_ci_cd_analytics!, only: [:charts]
before_action :authorize_create_pipeline!, only: [:new, :create, :config_variables]
before_action :authorize_update_pipeline!, only: [:retry, :cancel]
before_action do

View File

@ -2,8 +2,12 @@
module Resolvers
class ProjectPipelineStatisticsResolver < BaseResolver
include Gitlab::Graphql::Authorize::AuthorizeResource
type Types::Ci::AnalyticsType, null: true
authorizes_object!
authorize :read_ci_cd_analytics
def resolve
weekly_stats = Gitlab::Ci::Charts::WeekChart.new(object)
monthly_stats = Gitlab::Ci::Charts::MonthChart.new(object)

View File

@ -34,7 +34,7 @@ module Resolvers
return Todo.none unless current_user.present? && target.present?
return Todo.none if target.is_a?(User) && target != current_user
TodosFinder.new(current_user, todo_finder_params(args)).execute
TodosFinder.new(current_user, todo_finder_params(args)).execute.with_entity_associations
end
private

View File

@ -115,11 +115,9 @@ module Types
null: true,
description: 'Runbook for the alert as defined in alert details.'
field :todos,
Types::TodoType.connection_type,
null: true,
description: 'To-do items of the current user for the alert.',
resolver: Resolvers::TodoResolver
field :todos, description: 'To-do items of the current user for the alert.', resolver: Resolvers::TodoResolver do
extension(::Gitlab::Graphql::TodosProjectPermissionPreloader::FieldExtension)
end
field :details_url,
GraphQL::Types::String,

View File

@ -82,7 +82,7 @@ module Types
snippets: 'Snippets are',
container_registry: 'Container Registry is'
}.each do |feature, name_string|
field "#{feature}_enabled", GraphQL::BOOLEAN_TYPE, null: true,
field "#{feature}_enabled", GraphQL::Types::Boolean, null: true,
description: "Indicates if #{name_string} enabled for the current user"
define_method "#{feature}_enabled" do

View File

@ -55,9 +55,6 @@ module Types
type: GraphQL::Types::String,
null: false,
description: 'Web path of the user.'
field :todos,
resolver: Resolvers::TodoResolver,
description: 'To-do items of the user.'
field :group_memberships,
type: Types::GroupMemberType.connection_type,
null: true,
@ -81,6 +78,10 @@ module Types
description: 'Projects starred by the user.',
resolver: Resolvers::UserStarredProjectsResolver
field :todos, resolver: Resolvers::TodoResolver, description: 'To-do items of the user.' do
extension(::Gitlab::Graphql::TodosProjectPermissionPreloader::FieldExtension)
end
# Merge request field: MRs can be authored, assigned, or assigned-for-review:
field :authored_merge_requests,
resolver: Resolvers::AuthoredMergeRequestsResolver,

View File

@ -266,6 +266,10 @@ module AlertManagement
end
end
def to_ability_name
'alert_management_alert'
end
private
def hook_data

View File

@ -103,4 +103,12 @@ class ApplicationRecord < ActiveRecord::Base
def self.cached_column_list
self.column_names.map { |column_name| self.arel_table[column_name] }
end
def readable_by?(user)
Ability.allowed?(user, "read_#{to_ability_name}".to_sym, self)
end
def to_ability_name
model_name.element
end
end

View File

@ -550,6 +550,10 @@ class Commit
expire_note_etag_cache_for_related_mrs
end
def readable_by?(user)
Ability.allowed?(user, :read_commit, self)
end
private
def expire_note_etag_cache_for_related_mrs

View File

@ -95,10 +95,6 @@ module ProjectFeaturesCompatibility
# attribute.
def container_registry_enabled=(value)
write_feature_attribute_boolean(:container_registry_access_level, value)
# TODO: Remove this when we remove the projects.container_registry_enabled
# column. https://gitlab.com/gitlab-org/gitlab/-/issues/335425
super
end
private

View File

@ -182,10 +182,6 @@ module DesignManagement
File.join(DesignManagement.designs_directory, "issue-#{issue.iid}", design.filename)
end
def to_ability_name
'design'
end
def description
''
end

View File

@ -714,10 +714,6 @@ class Group < Namespace
Gitlab::ServiceDesk.supported? && all_projects.service_desk_enabled.exists?
end
def to_ability_name
model_name.singular
end
def activity_path
Gitlab::Routing.url_helpers.activity_group_path(self)
end

View File

@ -544,6 +544,25 @@ class Issue < ApplicationRecord
self.update_column(:upvotes_count, self.upvotes)
end
# Returns `true` if the given User can read the current Issue.
#
# This method duplicates the same check of issue_policy.rb
# for performance reasons, check commit: 002ad215818450d2cbbc5fa065850a953dc7ada8
# Make sure to sync this method with issue_policy.rb
def readable_by?(user)
if user.can_read_all_resources?
true
elsif project.owner == user
true
elsif confidential? && !assignee_or_author?(user)
project.team.member?(user, Gitlab::Access::REPORTER)
else
project.public? ||
project.internal? && !user.external? ||
project.team.member?(user)
end
end
private
def spammable_attribute_changed?
@ -569,25 +588,6 @@ class Issue < ApplicationRecord
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_created_action(author: author)
end
# Returns `true` if the given User can read the current Issue.
#
# This method duplicates the same check of issue_policy.rb
# for performance reasons, check commit: 002ad215818450d2cbbc5fa065850a953dc7ada8
# Make sure to sync this method with issue_policy.rb
def readable_by?(user)
if user.can_read_all_resources?
true
elsif project.owner == user
true
elsif confidential? && !assignee_or_author?(user)
project.team.member?(user, Gitlab::Access::REPORTER)
else
project.public? ||
project.internal? && !user.external? ||
project.team.member?(user)
end
end
# Returns `true` if this Issue is visible to everybody.
def publicly_visible?
project.public? && !confidential? && !::Gitlab::ExternalAuthorization.enabled?

View File

@ -384,12 +384,6 @@ class Note < ApplicationRecord
super
end
# This method is to be used for checking read permissions on a note instead of `system_note_with_references_visible_for?`
def readable_by?(user)
# note_policy accounts for #system_note_with_references_visible_for?(user) check when granting read access
Ability.allowed?(user, :read_note, self)
end
def award_emoji?
can_be_award_emoji? && contains_emoji_only?
end
@ -406,10 +400,6 @@ class Note < ApplicationRecord
note =~ /\A#{Banzai::Filter::EmojiFilter.emoji_pattern}\s?\Z/
end
def to_ability_name
model_name.singular
end
def noteable_ability_name
if for_snippet?
'snippet'

View File

@ -43,6 +43,8 @@ class Project < ApplicationRecord
extend Gitlab::ConfigHelper
ignore_columns :container_registry_enabled, remove_after: '2021-09-22', remove_with: '14.4'
BoardLimitExceeded = Class.new(StandardError)
ignore_columns :mirror_last_update_at, :mirror_last_successful_update_at, remove_after: '2021-09-22', remove_with: '14.4'
@ -1493,10 +1495,6 @@ class Project < ApplicationRecord
end
end
def to_ability_name
model_name.singular
end
# rubocop: disable CodeReuse/ServiceClass
def execute_hooks(data, hooks_scope = :push_hooks)
run_after_commit_or_now do

View File

@ -44,7 +44,18 @@ class IssuePolicy < IssuablePolicy
enable :update_subscription
end
rule { ~persisted & can?(:guest_access) }.policy do
# admin can set metadata on new issues
rule { ~persisted & admin }.policy do
enable :set_issue_metadata
end
# support bot needs to be able to set metadata on new issues when service desk is enabled
rule { ~persisted & support_bot & can?(:guest_access) }.policy do
enable :set_issue_metadata
end
# guest members need to be able to set issue metadata per https://gitlab.com/gitlab-org/gitlab/-/issues/300100
rule { ~persisted & is_project_member & can?(:guest_access) }.policy do
enable :set_issue_metadata
end

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
class PersonalAccessTokenPolicy < BasePolicy
condition(:is_owner) { user && subject.user_id == user.id }
condition(:is_owner) { user && subject.user_id == user.id && !subject.impersonation }
rule { (is_owner | admin) & ~blocked }.policy do
enable :read_token

View File

@ -284,6 +284,7 @@ class ProjectPolicy < BasePolicy
enable :read_confidential_issues
enable :read_package
enable :read_product_analytics
enable :read_ci_cd_analytics
end
# We define `:public_user_access` separately because there are cases in gitlab-ee
@ -484,6 +485,7 @@ class ProjectPolicy < BasePolicy
prevent(:read_insights)
prevent(:read_cycle_analytics)
prevent(:read_repository_graphs)
prevent(:read_ci_cd_analytics)
end
rule { wiki_disabled }.policy do
@ -559,6 +561,7 @@ class ProjectPolicy < BasePolicy
enable :read_cycle_analytics
enable :read_pages_content
enable :read_analytics
enable :read_ci_cd_analytics
enable :read_insights
# NOTE: may be overridden by IssuePolicy
@ -666,6 +669,7 @@ class ProjectPolicy < BasePolicy
rule { support_bot & ~service_desk_enabled }.policy do
prevent :create_note
prevent :read_project
prevent :guest_access
end
rule { project_bot }.enable :project_bot_access

View File

@ -5,7 +5,10 @@ class TodoPolicy < BasePolicy
condition(:own_todo) do
@user && @subject.user_id == @user.id
end
condition(:can_read_target) do
@user && @subject.target&.readable_by?(@user)
end
rule { own_todo }.enable :read_todo
rule { own_todo }.enable :update_todo
rule { own_todo & can_read_target }.enable :read_todo
rule { own_todo & can_read_target }.enable :update_todo
end

View File

@ -48,6 +48,9 @@ module Issues
params.delete(:created_at) unless moved_issue || current_user.can?(:set_issue_created_at, project)
params.delete(:updated_at) unless moved_issue || current_user.can?(:set_issue_updated_at, project)
# Only users with permission to handle error data can add it to issues
params.delete(:sentry_issue_attributes) unless current_user.can?(:update_sentry_issue, project)
issue.system_note_timestamp = params[:created_at] || params[:updated_at]
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
module Todos
class AllowedTargetFilterService
include Gitlab::Allowable
def initialize(todos, current_user)
@todos = todos
@current_user = current_user
end
def execute
Preloaders::UserMaxAccessLevelInProjectsPreloader.new(@todos.map(&:project).compact, @current_user).execute
@todos.select { |todo| can?(@current_user, :read_todo, todo) }
end
end
end

View File

@ -42,6 +42,7 @@
= link_to _("SSH keys"), keys_admin_user_path(@user)
= nav_link(controller: :identities) do
= link_to _("Identities"), admin_user_identities_path(@user)
= nav_link(controller: :impersonation_tokens) do
= link_to _("Impersonation Tokens"), admin_user_impersonation_tokens_path(@user)
- if impersonation_enabled?
= nav_link(controller: :impersonation_tokens) do
= link_to _("Impersonation Tokens"), admin_user_impersonation_tokens_path(@user)
.gl-mb-3

View File

@ -25,7 +25,7 @@
= number_with_delimiter(todos_done_count)
.nav-controls
- if @todos.any?(&:pending?)
- if @allowed_todos.any?(&:pending?)
.gl-mr-3
= link_to destroy_all_dashboard_todos_path(todos_filter_params), class: 'gl-button btn btn-default btn-loading align-items-center js-todos-mark-all', method: :delete, data: { href: destroy_all_dashboard_todos_path(todos_filter_params) } do
Mark all as done
@ -82,11 +82,11 @@
= sort_title_oldest_created
.row.js-todos-all
- if @todos.any?
- if @allowed_todos.any?
.col.js-todos-list-container{ data: { qa_selector: "todos_list_container" } }
.js-todos-options{ data: { per_page: @todos.limit_value, current_page: @todos.current_page, total_pages: @todos.total_pages } }
.js-todos-options{ data: { per_page: @allowed_todos.count, current_page: @todos.current_page, total_pages: @todos.total_pages } }
%ul.content-list.todos-list
= render @todos
= render @allowed_todos
= paginate @todos, theme: "gitlab"
.js-nothing-here-container.empty-state.hidden
.svg-content

View File

@ -1,29 +1,30 @@
- page_title _("Invitation")
%h3.page-title= _("Invitation")
%p
= _("You have been invited")
- inviter = @member.created_by
- if inviter
= _("by")
= link_to inviter.name, user_url(inviter)
= _("to join %{source_name}") % { source_name: @invite_details[:title] }
%strong
= link_to @invite_details[:name], @invite_details[:url]
= _("as %{role}.") % { role: @member.human_access }
- if current_user_matches_invite?
- if member?
%p
= _("You are already a member of this %{member_source}.") % { member_source: @invite_details[:title] }
.actions
= link_to _("Go to %{source_name}") % { source_name: @invite_details[:title] }, @invite_details[:url], class: "btn gl-button btn-confirm"
- if member?
%p
= _("However, you are already a member of this %{member_source}. Sign in using a different account to accept the invitation.") % { member_source: @invite_details[:title] }
- else
%p
- inviter = @member.created_by
- link_to_inviter = link_to(inviter.name, user_url(inviter))
- link_to_source = link_to(@invite_details[:name], @invite_details[:url])
- if !current_user_matches_invite?
= html_escape(_("You have been invited by %{link_to_inviter} to join %{source_name} %{strong_open}%{link_to_source}%{strong_close} as %{role}")) % { link_to_inviter: link_to_inviter, source_name: @invite_details[:title], strong_open: '<strong>'.html_safe, link_to_source: link_to_source, strong_close: '</strong>'.html_safe, role: @member.human_access }
.actions
= link_to _("Accept invitation"), accept_invite_url(@token), method: :post, class: "btn gl-button btn-confirm"
= link_to _("Decline"), decline_invite_url(@token), method: :post, class: "btn gl-button btn-danger gl-ml-3"
- else
%p
- mail_to_invite_email = mail_to(@member.invite_email)
- mail_to_current_user = mail_to(current_user.email)
- link_to_current_user = link_to(current_user.to_reference, user_url(current_user))
= _("Note that this invitation was sent to %{mail_to_invite_email}, but you are signed in as %{link_to_current_user} with email %{mail_to_current_user}.").html_safe % { mail_to_invite_email: mail_to_invite_email, mail_to_current_user: mail_to_current_user, link_to_current_user: link_to_current_user }
- if !member?
.actions
= link_to _("Accept invitation"), accept_invite_url(@token), method: :post, class: "btn gl-button btn-confirm"
= link_to _("Decline"), decline_invite_url(@token), method: :post, class: "btn gl-button btn-danger gl-ml-3"
= _("This invitation was sent to %{mail_to_invite_email}, but you are signed in as %{link_to_current_user} with email %{mail_to_current_user}.").html_safe % { mail_to_invite_email: mail_to_invite_email, mail_to_current_user: mail_to_current_user, link_to_current_user: link_to_current_user }
%p
= _("Sign in as a user with the matching email address, add the email to this account, or sign-up for a new account using the matching email.")

View File

@ -44,26 +44,26 @@
:preserve
git clone #{ content_tag(:span, default_url_to_repo, class: 'js-clone')}
cd #{h @project.path}
git switch -c #{default_branch_name}
git switch -c #{h default_branch_name}
touch README.md
git add README.md
git commit -m "add README"
- if @project.can_current_user_push_to_default_branch?
%span><
git push -u origin #{ default_branch_name }
git push -u origin #{h default_branch_name }
%fieldset
%h5= _('Push an existing folder')
%pre.bg-light
:preserve
cd existing_folder
git init --initial-branch=#{default_branch_name}
git init --initial-branch=#{h default_branch_name}
git remote add origin #{ content_tag(:span, default_url_to_repo, class: 'js-clone')}
git add .
git commit -m "Initial commit"
- if @project.can_current_user_push_to_default_branch?
%span><
git push -u origin #{ default_branch_name }
git push -u origin #{h default_branch_name }
%fieldset
%h5= _('Push an existing Git repository')

View File

@ -1,6 +1,10 @@
.gl-alert.gl-alert-warning.js-recovery-settings-callout{ role: 'alert', data: { feature_id: "account_recovery_regular_check", dismiss_endpoint: user_callouts_path, defer_links: "true" } }
%button.js-close.gl-alert-dismiss.gl-cursor-pointer{ type: 'button', 'aria-label' => _('Dismiss') }
= sprite_icon('close', css_class: 'gl-icon')
= render 'shared/global_alert',
variant: :warning,
alert_class: 'js-recovery-settings-callout',
alert_data: { feature_id: 'account_recovery_regular_check', dismiss_endpoint: user_callouts_path, defer_links: 'true' } do
.gl-alert-body
- account_link_start = '<a class="deferred-link" href="%{url}">'.html_safe % { url: profile_account_path }
= _("Please ensure your account's %{account_link_start}recovery settings%{account_link_end} are up to date.").html_safe % { account_link_start: account_link_start, account_link_end: '</a>'.html_safe }
= s_('Profiles|We recommend you ensure two-factor authentication is enabled and the settings are up to date.')
= link_to _('Learn more.'), help_page_path('user/profile/account/two_factor_authentication'), target: '_blank', rel: 'noopener noreferrer'
.gl-alert-actions
= link_to profile_two_factor_auth_path, class: 'deferred-link btn gl-alert-action btn-confirm btn-md gl-button' do
= s_('Profiles|Manage two-factor authentication')

View File

@ -19,6 +19,4 @@ OmniAuth.config.before_request_phase do |env|
Gitlab::RequestForgeryProtection.call(env)
end
# Use json formatter
OmniAuth.config.logger.formatter = Gitlab::OmniauthLogging::JSONFormatter.new
OmniAuth.config.logger.level = Logger::ERROR if Rails.env.production?
OmniAuth.config.logger = Gitlab::AppLogger

View File

@ -0,0 +1,55 @@
# frozen_string_literal: true
class FinalizeConvertGeoJobArtifactDeletedEventsBigint < ActiveRecord::Migration[6.1]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
TABLE_NAME = 'geo_job_artifact_deleted_events'
COLUMN_NAME = 'job_artifact_id'
COLUMN_NAME_CONVERTED = "#{COLUMN_NAME}_convert_to_bigint"
def up
ensure_batched_background_migration_is_finished(
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: TABLE_NAME,
column_name: 'id',
job_arguments: [[COLUMN_NAME], [COLUMN_NAME_CONVERTED]]
)
swap
end
def down
swap
end
def swap
old_index_name = 'index_geo_job_artifact_deleted_events_on_job_artifact_id'
bigint_index_name = 'index_geo_job_artifact_deleted_events_on_job_artifact_id_bigint'
add_concurrent_index TABLE_NAME, COLUMN_NAME_CONVERTED, name: bigint_index_name
with_lock_retries(raise_on_exhaustion: true) do
execute("LOCK TABLE #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE")
temp_name = quote_column_name("#{COLUMN_NAME}_tmp")
old_column_name = quote_column_name(COLUMN_NAME)
new_column_name = quote_column_name(COLUMN_NAME_CONVERTED)
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{old_column_name} TO #{temp_name}"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{new_column_name} TO #{old_column_name}"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN #{temp_name} TO #{new_column_name}"
change_column_default TABLE_NAME, COLUMN_NAME, nil
change_column_default TABLE_NAME, COLUMN_NAME_CONVERTED, 0
function_name = Gitlab::Database::UnidirectionalCopyTrigger.on_table(TABLE_NAME).name(COLUMN_NAME, COLUMN_NAME_CONVERTED)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
execute "DROP INDEX #{old_index_name}"
rename_index TABLE_NAME, bigint_index_name, old_index_name
end
end
end

View File

@ -0,0 +1 @@
d98c54e5ec60fc0ee1c008160118f6f0c45eb801932d4d3abcd26aba33ebdea6

View File

@ -13252,9 +13252,9 @@ ALTER SEQUENCE geo_hashed_storage_migrated_events_id_seq OWNED BY geo_hashed_sto
CREATE TABLE geo_job_artifact_deleted_events (
id bigint NOT NULL,
job_artifact_id integer NOT NULL,
job_artifact_id_convert_to_bigint integer DEFAULT 0 NOT NULL,
file_path character varying NOT NULL,
job_artifact_id_convert_to_bigint bigint DEFAULT 0 NOT NULL
job_artifact_id bigint NOT NULL
);
CREATE SEQUENCE geo_job_artifact_deleted_events_id_seq

Six binary image files added (50 KiB to 154 KiB each); not shown.

View File

@ -0,0 +1,12 @@
---
stage: Enablement
group: Database
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
comments: false
description: 'Learn how to scale the database through the use of best-of-class database scalability patterns'
---
# Database Scalability Patterns
- [Read-mostly](read_mostly.md)
- [Time-decay](time_decay.md)

View File

@ -0,0 +1,152 @@
---
stage: Enablement
group: database
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
comments: false
description: 'Learn how to operate on read-mostly data at scale'
---
# Read-mostly data
[Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/326037) in GitLab 14.0.
This document describes the *read-mostly* pattern introduced in the
[Database Scalability Working Group](https://about.gitlab.com/company/team/structure/working-groups/database-scalability/#read-mostly-data).
We discuss the characteristics of *read-mostly* data and propose best practices for GitLab development
to consider in this context.
## Characteristics of read-mostly data
As the name suggests, *read-mostly* data is data that is read much more often than it is
updated. Writes to this data through updates, inserts, or deletes are rare events compared to
reads.
In addition, *read-mostly* data in this context is typically a small dataset. We explicitly don't deal
with large datasets here, even though they often have a "write once, read often" characteristic, too.
### Example: license data
Let's introduce a canonical example: license data in GitLab. A GitLab instance may have a license
attached to use GitLab enterprise features. This license data is held instance-wide, that
is, there typically only exist a few relevant records. This information is kept in a table
`licenses` which is very small.
We consider this *read-mostly* data because it follows the characteristics outlined above:
- **Rare writes:** license data very rarely sees any writes after having inserted the license.
- **Frequent reads:** license data is read extremely often to check if enterprise features can be used.
- **Small size:** this dataset is very small. On GitLab.com we have 5 records at < 50 kB total relation size.
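
To make the read/write asymmetry of the license example concrete, here is a minimal illustration, assuming the EE `License` model (the feature name and license content are placeholders): essentially every EE feature check reads from the tiny `licenses` table, while writes only happen when a license is uploaded.

```ruby
# Read path: runs on a large share of requests on an EE instance.
License.feature_available?(:group_wikis)

# Write path: happens only a handful of times over the life of an instance.
License.create!(data: File.read('GitLab.gitlab-license'))
```
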
### Effects of *read-mostly* data at scale
Given this dataset is small and read very often, we can expect data to nearly always reside in
database caches and/or database disk caches. Thus, the concern with *read-mostly* data is typically
not around database I/O overhead, because we typically don't read data from disk anyway.
However, considering the high frequency of reads, this has the potential to incur overhead in terms
of database CPU load and database context switches. Additionally, those high-frequency queries go
through the whole database stack, and they cause overhead on the database connection
multiplexing components and load balancers. The application also spends cycles preparing and
sending queries to retrieve the data, deserializing the results, and allocating new objects to
represent the information gathered - all at high frequency.
In the example of license data above, the query to read license data was
[identified](https://gitlab.com/gitlab-org/gitlab/-/issues/292900) to stand out in terms of query
frequency. In fact, we were seeing around 6,000 queries per second (QPS) on the cluster during peak
times. With the cluster size at that time, we were seeing about 1,000 QPS on each replica, and fewer
than 400 QPS on the primary at peak times. The difference is explained by our
[database load balancing for scaling reads](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/gitlab/database/load_balancing.rb),
which favors replicas for pure read-only transactions.
![Licenses Calls](img/read_mostly_licenses_calls_v14_2.png)
The overall transaction throughput on the database primary at the time varied between 50,000 and
70,000 transactions per second (TPS). In comparison, this query frequency accounts for only a small
portion of the overall query frequency. However, we still expect it to cause considerable overhead
in terms of context switches, and it is worth removing this overhead if we can.
## How to recognize read-mostly data
It can be difficult to recognize *read-mostly* data, even though there are clear cases like in our
example.
One approach is to look at the [read/write ratio and statistics from, for example, the primary](https://bit.ly/3frdtyz). Here, we look at the top 20 tables by their read/write ratio over 60 minutes (taken at a peak traffic time):
```plaintext
bottomk(20,
avg by (relname, fqdn) (
(
rate(pg_stat_user_tables_seq_tup_read{env="gprd"}[1h])
+
rate(pg_stat_user_tables_idx_tup_fetch{env="gprd"}[1h])
) /
(
rate(pg_stat_user_tables_seq_tup_read{env="gprd"}[1h])
+ rate(pg_stat_user_tables_idx_tup_fetch{env="gprd"}[1h])
+ rate(pg_stat_user_tables_n_tup_ins{env="gprd"}[1h])
+ rate(pg_stat_user_tables_n_tup_upd{env="gprd"}[1h])
+ rate(pg_stat_user_tables_n_tup_del{env="gprd"}[1h])
)
) and on (fqdn) (pg_replication_is_replica == 0)
)
```
This yields a good impression of which tables are much more often read than written (on the database
primary):
![Read Write Ratio TOP20](img/read_mostly_readwriteratio_v14_2.png)
From here, we can [zoom](https://bit.ly/2VmloX1) into, for example, `gitlab_subscriptions` and see that index reads peak at above 10k tuples per second overall (there are no sequential scans):
![Subscriptions: reads](img/read_mostly_subscriptions_reads_v14_2.png)
In contrast, we write to the table very rarely:
![Subscriptions: writes](img/read_mostly_subscriptions_writes_v14_2.png)
Additionally, the table is only 400 MB in size, so it may be another candidate to consider for this
pattern (see [#327483](https://gitlab.com/gitlab-org/gitlab/-/issues/327483)).
## Best practices for handling read-mostly data at scale
### Cache read-mostly data
To reduce the database overhead, we implement a cache for the data and thus significantly
reduce the query frequency on the database side. There are different scopes for caching available:
- `RequestStore`: per-request in-memory cache (based on [request_store gem](https://github.com/steveklabnik/request_store))
- [`ProcessMemoryCache`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/process_memory_cache.rb#L4): per-process in-memory cache (a `ActiveSupport::Cache::MemoryStore`)
- [`Gitlab::Redis::Cache`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/redis/cache.rb) and `Rails.cache`: full-blown cache in Redis
Continuing the above example, we had a `RequestStore` in place to cache license information on a
per-request basis. However, that still leads to one query per request. When we started to cache license information
[using a process-wide in-memory cache](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/50318)
for 1 second, query frequency dramatically dropped:
![Licenses Calls - Fixed](img/read_mostly_licenses_fixed_v14_2.png)
The choice of caching here highly depends on the characteristics of data in question. A very small
dataset like license data that is nearly never updated is a good candidate for in-memory caching.
A per-process cache is favorable here, because this unties the cache refresh rate from the incoming
request rate.
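
As a minimal sketch of this per-process cache, assuming the `Gitlab::ProcessMemoryCache` backend linked above (the cache key and method name are illustrative), the 1-second TTL unties the cache refresh rate from the incoming request rate:

```ruby
def current_license
  # Per-process ActiveSupport::Cache::MemoryStore; one DB read per process per second at most.
  Gitlab::ProcessMemoryCache.cache_backend.fetch('current_license', expires_in: 1.second) do
    License.order(created_at: :desc).first
  end
end
```
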
A caveat here is that our Redis setup is currently not using Redis secondaries and we rely on a
single node for caching. That is, we need to strike a balance to avoid Redis falling over due to
increased pressure. In comparison, reading data from PostgreSQL replicas can be distributed across
several read-only replicas. Even though a query to the database might be more expensive, the
load is balanced across more nodes.
### Read read-mostly data from replica
With or without caching implemented, we also must make sure to read data from database replicas if
we can. This supports our efforts to scale reads across many database replicas and removes
unnecessary workload from the database primary.
GitLab [database load balancing for reads](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/gitlab/database/load_balancing.rb)
sticks to the primary after a first write or when opening an
explicit transaction. In the context of *read-mostly* data, we strive to read this data outside of a
transaction scope and before doing any writes. This is often possible given that this data is only
seldom updated (and thus we're often not concerned with reading slightly stale data, for example).
However, it can be non-obvious that this query cannot be sent to a replica because of a previous
write or transaction. Hence, when we encounter *read-mostly* data, it is a good practice to check the
wider context and make sure this data can be read from a replica.
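
A rough sketch of the ordering concern, under the assumption that the load balancer sticks to the primary after the first write (class and method names below are illustrative, not GitLab's actual code): read the read-mostly record before opening the transaction, so the `SELECT` can still be served by a replica.

```ruby
class EnableFeatureService
  def execute(project)
    license = License.current # read-only, outside any transaction - may be served by a replica

    Project.transaction do
      project.update!(description: 'updated') # first write sticks the connection to the primary
    end

    license
  end
end
```
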

View File

@ -0,0 +1,361 @@
---
stage: Enablement
group: database
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#assignments
comments: false
description: 'Learn how to operate on large time-decay data'
---
# Time-decay data
[Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/326035) in GitLab 14.0.
This document describes the *time-decay pattern* introduced in the
[Database Scalability Working Group](https://about.gitlab.com/company/team/structure/working-groups/database-scalability/#time-decay-data).
We discuss the characteristics of time-decay data, and propose best practices for GitLab development
to consider in this context.
Some datasets are subject to strong time-decay effects, in which recent data is accessed far more
frequently than older data. Another aspect of time-decay: with time, some types of data become
less important. This means we can also move old data to a bit less durable (less available) storage,
or even delete the data, in extreme cases.
Those effects are usually tied to product or application semantics. They can vary in the degree
that older data are accessed, and how useful or required older data are to the users or the
application.
Let's first consider entities with no inherent time-related bias for their data.
A record for a user or a project may be equally important and frequently accessed regardless of when
it was created. We cannot predict from a user's `id` or `created_at` how often the related
record is accessed or updated.
On the other hand, a good example of datasets with extreme time-decay effects is logs and time-series
data, such as events recording user actions.
Most of the time, that type of data has no business use after a couple of days or weeks, and it
quickly becomes less important even from a data analysis perspective. It represents a snapshot that
quickly becomes less and less relevant to the current state of the application, until at
some point it has no real value.
In the middle of the two extremes, we can find datasets that have useful information that we want to
keep around, but with old records seldom being accessed after an initial (small) time period after
creation.
## Characteristics of time-decay data
We are interested in datasets that show the following characteristics:
- **Size of the dataset:** they are considerably large.
- **Access methods:** we can filter the vast majority of queries accessing the dataset
by a time related dimension or a categorical dimension with time decay effects.
- **Immutability:** the time-decay status does not change.
- **Retention:** whether we want to keep the old data or not, or whether old
data should remain accessible by users through the application.
### Size of the dataset
There can be datasets of variable sizes that show strong time-decay effects, but in the context of
this blueprint, we intend to focus on entities with a **considerably large dataset**.
Smaller datasets do not contribute significantly to the database related resource usage, nor do they
inflict a considerable performance penalty to queries.
In contrast, large datasets over about 50 million records, or 100 GB in size, add a significant
overhead to constantly accessing a really small subset of the data. In those cases, we want to
use the time-decay effect to our advantage and reduce the actively accessed dataset.
### Data access methods
The second and most important characteristic of time-decay data is that most of the time, we are
able to implicitly or explicitly access the data using a date filter,
**restricting our results based on a time-related dimension**.
There can be many such dimensions, but we are only going to focus on the creation date as it is both
the most commonly used, and the one that we can control and optimize against. It:
- Is immutable.
- Is set when the record is created.
- Can be tied to physically clustering the records, without having to move them around.
It's important to add that even if time-decay data are not accessed that way by the application by
default, you can make the vast majority of the queries explicitly filter the data in such
a way. **Time decay data without such a time-decay related access method are of no use from an optimization perspective, as there is no way to set and follow a scaling pattern.**
We are not restricting the definition to data that are always accessed using a time-decay related
access method, as there may be some outlier operations. These may be necessary and we can accept
them not scaling, if the rest of the access methods can scale. An example:
an administrator accessing all past events of a specific type, while all other operations only access
a maximum of a month of events, restricted to 6 months in the past.
### Immutability
The third characteristic of time-decay data is that their **time-decay status does not change**.
Once they are considered "old", they can not switch back to "new" or relevant again.
This definition may sound trivial, but we have to be able to make operations over "old" data **more**
expensive (for example, by archiving or moving them to less expensive storage) without having to worry about
the repercussions of switching back to being relevant and having important application operations
underperforming.
As a counter-example to a time-decay data access pattern, consider an application view that presents
issues by when they were updated. We are also interested in the most recent data from an "update"
perspective, but that definition is volatile and not actionable.
### Retention
Finally, a characteristic that further differentiates time-decay data into sub-categories with
slightly different approaches available is **whether we want to keep the old data or not**
(for example, a retention policy) and/or
**whether old data will be accessible by users through the application**.
#### (optional) Extended definition of time-decay data
As a side note, if we extend the aforementioned definitions to access patterns that restrict access
to a well defined subset of the data based on a clustering attribute, we could use the time-decay
scaling patterns for many other types of data.
As an example, consider data that are only accessed while they are labeled as active, like To-Dos
not marked as done, pipelines for unmerged merge requests (or a similar not time based constraint), etc.
In this case, instead of using a time dimension to define the decay, we use a categorical dimension
(that is, one that uses a finite set of values) to define the subset of interest. As long as that
subset is small compared to the overall size of the dataset, we could use the same approach.
Similarly, we may define data as old based both on a time dimension and additional status attributes,
such as CI pipelines that failed more than 6 months ago.
## Time-decay data strategies
### Partition tables
This is the acceptable best practice for addressing time-decay data from a pure database perspective.
You can find more information on table partitioning for PostgreSQL in the
[documentation page for table partitioning](https://www.postgresql.org/docs/12/ddl-partitioning.html).
Partitioning by date intervals (for example, month, year) allows us to create much smaller tables
(partitions) for each date interval and only access the most recent partition(s) for any
application related operation.
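
As a hypothetical sketch (not the actual GitLab migration) of what monthly partitioning can look like, assuming helpers along the lines of `Gitlab::Database::PartitioningMigrationHelpers`:

```ruby
class PartitionWebHookLogsByMonth < ActiveRecord::Migration[6.1]
  include Gitlab::Database::PartitioningMigrationHelpers

  def up
    # Creates a partitioned copy of the table with one partition per month of
    # `created_at`, plus sync triggers mirroring writes from the original table.
    partition_table_by_date :web_hook_logs, :created_at
  end

  def down
    drop_partitioned_table_for :web_hook_logs
  end
end
```
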
We have to set the partitioning key based on the date interval of interest, which may depend on two
factors:
1. **How far back in time do we need to access data for?**
Partitioning by week is of no use if we always access data for a year back, as we would have to
execute queries over 52 different partitions (tables) each time. As an example, consider the
activity feed on the profile of any GitLab user.
In contrast, if we want to just access the last 7 days of created records, partitioning by year
would include too many unnecessary records in each partition, as is the case for `web_hook_logs`.
1. **How large are the partitions created?**
The major purpose of partitioning is accessing tables that are as small as possible. If they get too
large by themselves, queries start underperforming, and we may have to re-partition (split) them
into even smaller partitions.
The ideal partitioning scheme keeps **almost all queries over a dataset within a single partition**;
some queries spanning two partitions, and seldom more, is an acceptable balance. We should also
aim for **partitions that are as small as possible**, at most around 5-10M records and/or 10 GB
each.
Partitioning can be combined with other strategies to either prune (drop) old partitions, move them
to cheaper storage inside the database or move them outside of the database (archive or use of other
types of storage engines).
As long as we do not want to keep old records and partitioning is used, pruning old data has a
constant, for all intents and purposes zero, cost compared to deleting the data from a huge table
(as described in the following sub-section). We just need a background worker to drop old partitions
whenever all the data inside that partition get out of the retention policy's period.
As an example, if we only want to keep records no more than 6 months old and we partition by month,
we can safely keep the 7 latest partitions at all times (current month and 6 months in the past).
That means that we can have a worker dropping the 8th oldest partition at the start of each month.
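
A rough sketch of that monthly cleanup worker, assuming a partition-per-month naming scheme such as `web_hook_logs_202101` in the `gitlab_partitions_dynamic` schema (illustrative only, not GitLab's actual partition management code):

```ruby
class DropExpiredWebHookLogPartitionsWorker
  include ApplicationWorker

  KEEP_MONTHS = 6 # the current month plus six months back stay in place

  def perform
    expired_month = (KEEP_MONTHS + 1).months.ago.beginning_of_month
    partition = "gitlab_partitions_dynamic.web_hook_logs_#{expired_month.strftime('%Y%m')}"

    # Dropping a whole partition is a constant-cost operation, unlike row-by-row deletes.
    ApplicationRecord.connection.execute("DROP TABLE IF EXISTS #{partition}")
  end
end
```
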
Moving partitions to cheaper storage inside the same database is relatively simple in PostgreSQL
through the use of [tablespaces](https://www.postgresql.org/docs/12/manage-ag-tablespaces.html).
It is possible to specify a tablespace and storage parameters for each partition separately, so the
approach in this case would be to:
1. Create a new tablespace on a cheaper, slow disk.
1. Set the storage parameters higher on that new tablespace so that the PostgreSQL optimizer knows that the disks are slower.
1. Move the old partitions to the slow tablespace automatically by using background workers.
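
A rough sketch of step 3 above, moving an old monthly partition to a slower, cheaper tablespace; the tablespace and partition names are placeholders:

```ruby
class MoveOldWebHookLogPartitionToSlowStorage < ActiveRecord::Migration[6.1]
  def up
    execute(<<~SQL)
      ALTER TABLE gitlab_partitions_dynamic.web_hook_logs_202101
        SET TABLESPACE slow_archive_tablespace;
    SQL
  end

  def down
    execute(<<~SQL)
      ALTER TABLE gitlab_partitions_dynamic.web_hook_logs_202101
        SET TABLESPACE pg_default;
    SQL
  end
end
```
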
Finally, moving partitions outside of the database can be achieved through database archiving or
manually exporting the partitions to a different storage engine (more details in the dedicated
sub-section).
### Prune old data
If we don't want to keep old data around in any form, we can implement a pruning strategy and
delete old data.
It's a simple-to-implement strategy that uses a pruning worker to delete past data. As an example
that we analyze further below, we prune `web_hook_logs` records older than 90 days.
The disadvantage of such a solution over large, non-partitioned tables is that we have to manually
access and delete all the records that are no longer considered relevant. That is a very
expensive operation, due to multi-version concurrency control in PostgreSQL. It also leads to the
pruning worker not being able to catch up with new records being created if that rate exceeds a
threshold, as was the case for [web_hook_logs](https://gitlab.com/gitlab-org/gitlab/-/issues/256088)
at the time of writing this document.
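
For illustration, a simplified sketch of that kind of pruning worker, deleting 90-day-old rows in small batches (hypothetical class, loosely modeled on the `PruneWebHookLogsWorker` mentioned later in this document):

```ruby
class PruneOldWebHookLogsWorker
  include ApplicationWorker

  BATCH_SIZE = 10_000

  def perform
    loop do
      ids = WebHookLog.where('created_at < ?', 90.days.ago).limit(BATCH_SIZE).pluck(:id)
      break if ids.empty?

      # Each iteration deletes at most BATCH_SIZE rows to keep transactions short,
      # but every deleted row still pays the MVCC cost described above.
      WebHookLog.where(id: ids).delete_all
    end
  end
end
```
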
For the aforementioned reasons, our proposal is that
**we should base any implementation of a data retention strategy on partitioning**,
unless there are strong reasons not to.
### Move old data outside of the database
In most cases, we consider old data as valuable, so we do not want to prune them. If at the same
time, they are not required for any database related operations (for example, directly accessed or used in
joins and other types of queries), we can move them outside of the database.
That does not mean that they are not directly accessible by users through the application; we could
move data outside the database and use other storage engines or access types for them, similarly to
offloading metadata but only for the case of old data.
In the simplest use case we can provide fast and direct access to recent data, while allowing users
to download an archive with older data. This is an option evaluated in the `audit_events` use case.
Depending on the country and industry, audit events may have a very long retention period, while
only the past month(s) of data are actively accessed through GitLab interface.
Additional use cases may include exporting data to a data warehouse or other types of data stores as
they may be better suited for processing that type of data. An example is JSON logs that we
sometimes store in tables: loading such data into BigQuery or a columnar store like Redshift may
be better for analyzing/querying the data.
We might consider a number of strategies for moving data outside of the database:
1. Streaming this type of data into logs and then move them to secondary storage options
or load them to other types of data stores directly (as CSV/JSON data).
1. Creating an ETL process that exports the data to CSV, uploads it to object storage,
drops this data from the database, and then loads the CSV into a different data store
(a rough sketch of this strategy follows the list).
1. Loading the data in the background by using the API provided by the data store.
This may not be a viable solution for large datasets; as long as bulk uploading using files is an
option, it should outperform API calls.
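
A very rough sketch of the second strategy above: export old rows to CSV with PostgreSQL `COPY` before dropping them. The table, cut-off, and path are placeholders, and a real job would also upload and verify the file before deleting anything.

```ruby
class ExportOldAuditEventsToCsv
  def execute(before:, path:)
    conn = ApplicationRecord.connection.raw_connection

    sql = "COPY (SELECT * FROM audit_events WHERE created_at < #{conn.escape_literal(before.to_s)}) " \
          "TO STDOUT WITH (FORMAT csv, HEADER)"

    File.open(path, 'w') do |file|
      # Stream the COPY output straight to a local CSV file.
      conn.copy_data(sql) do
        while (row = conn.get_copy_data)
          file.write(row)
        end
      end
    end
  end
end
```
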
## Use cases
### Web hook logs
Related epic: [Partitioning: `web_hook_logs` table](https://gitlab.com/groups/gitlab-org/-/epics/5558)
The important characteristics of `web_hook_logs` are the following:
1. Size of the dataset: it is a really large table. At the time we decided to
partition it (`2021-03-01`), it had roughly 527M records and a total size of roughly 1 TB:
- Table: `web_hook_logs`
- Rows: approximately 527M
- Total size: 1.02 TiB (10.46%)
- Table size: 713.02 GiB (13.37%)
- Index(es) size: 42.26 GiB (1.10%)
- TOAST size: 279.01 GiB (38.56%)
1. Access methods: we always request at most the past 7 days of logs.
1. Immutability: it can be partitioned by `created_at`, an attribute that does not change.
1. Retention: there is a 90-day retention policy set for it.
Additionally, we were at the time trying to prune the data by using a background worker
(`PruneWebHookLogsWorker`), which could not [keep up with the rate of inserts](https://gitlab.com/gitlab-org/gitlab/-/issues/256088).
As a result, in March 2021 there were still undeleted records dating back to July 2020, and the
table was growing by more than 2 million records per day instead of staying at a more or less
stable size.
Finally, the rate of inserts had grown to more than 170GB of data per month by March 2021 and keeps
growing, so the only viable way to prune old data was through partitioning.
Our approach was to partition the table per month, as that aligns with the 90-day retention policy.
The process required is as follows:
1. Decide on a partitioning key.
Using the `created_at` column is straightforward in this case: it is a natural
partitioning key when a retention policy exists and there are no conflicting access patterns.
1. After we decide on the partitioning key, we can create the partitions and backfill
them (copy data from the existing table). We can't just partition an existing table;
we have to create a new partitioned table.
So, we have to create the partitioned table and all the related partitions (see the simplified
sketch after this list), start copying everything over, and also add sync triggers so that any
new data or updates/deletes to existing data are mirrored to the new partitioned table.
[MR with all the necessary details on how to start partitioning a table](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/55938)
It required 15 days and 7.6 hours to complete that process.
1. One milestone after the initial partitioning starts, clean up after the background migration
used to backfill: finish executing any remaining jobs, retry failed jobs, and so on.
[MR with all the necessary details](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/57580)
1. Add any remaining foreign keys and secondary indexes to the partitioned table. This brings
its schema on par with the original non-partitioned table before we swap them in the next milestone.
We do not add them at the beginning, as they add overhead to each insert and would slow down the
initial backfilling of the table (in this case more than half a billion records, so the overhead
adds up significantly). So we create a lightweight, *vanilla* version of the table, copy all the
data, and then add any remaining indexes and foreign keys.
1. Swap the base table with the partitioned copy: this is when the partitioned table
starts being actively used by the application.
Dropping the original table is a destructive operation, and we want to make sure that we have no
issues during the process, so we keep the old non-partitioned table. We also switch the sync trigger
the other way around so that the non-partitioned table is kept up to date with any operations
happening on the partitioned table. That allows us to swap the tables back if necessary.
[MR with all the necessary details](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60184)
1. Last step, one milestone after the swap: drop the non-partitioned table
[Issue with all the necessary details](https://gitlab.com/gitlab-org/gitlab/-/issues/323678)
1. After the non-partitioned table is dropped, we can add a worker to implement the
pruning strategy by dropping past partitions.
In this case, the worker makes sure that only four partitions are active at any time (as the
retention policy is 90 days) and drops any partitions older than four months. We have to keep four
months of partitions while the current month is still active, as going 90 days back takes you to
the fourth-oldest partition.
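The two database operations at the heart of this process, creating the partitioned table and later
dropping whole partitions, are sketched below in simplified form. The real migrations use the
helpers in `Gitlab::Database::PartitioningMigrationHelpers` (see the MRs linked above) rather than
raw SQL, and the column list and partition names here are illustrative only.
```ruby
connection = ActiveRecord::Base.connection

# Simplified sketch of the new table, partitioned by month on created_at,
# with one monthly partition shown. The primary key must include the
# partitioning column.
connection.execute(<<~SQL)
  CREATE TABLE web_hook_logs_part (
    id bigint NOT NULL,
    web_hook_id bigint NOT NULL,
    created_at timestamptz NOT NULL,
    -- remaining columns omitted for brevity
    PRIMARY KEY (id, created_at)
  ) PARTITION BY RANGE (created_at);

  CREATE TABLE web_hook_logs_202103 PARTITION OF web_hook_logs_part
    FOR VALUES FROM ('2021-03-01') TO ('2021-04-01');
SQL

# Pruning after the swap: dropping an expired monthly partition is a cheap,
# metadata-only operation, unlike deleting millions of individual rows.
connection.execute(<<~SQL)
  ALTER TABLE web_hook_logs DETACH PARTITION web_hook_logs_202011;
  DROP TABLE IF EXISTS web_hook_logs_202011;
SQL
```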
### Audit Events
Related epic: [Partitioning: Design and implement partitioning strategy for Audit Events](https://gitlab.com/groups/gitlab-org/-/epics/3206)
The `audit_events` table shares a lot of characteristics with the `web_hook_logs` table discussed
in the previous sub-section, so we focus on the points where they differ.
The consensus was that
[partitioning could solve most of the performance issues](https://gitlab.com/groups/gitlab-org/-/epics/3206#note_338157248).
In contrast to most other large tables, it has no major conflicting access patterns: we could switch
the access patterns to align with partitioning by month. That is not the case for many other
tables, which could justify a partitioning approach (for example, by namespace) but have many
conflicting access patterns.
In addition, `audit_events` is a write-heavy table with very few reads (queries) over it and has a
very simple schema, not connected with the rest of the database (no incoming or outgoing FK
constraints) and with only two indexes defined over it.
The latter was important at the time, as not having foreign key constraints meant that we could
partition it while we were still on PostgreSQL 11. *This is no longer a concern now that we have
moved to PostgreSQL 12 as a required default, as can be seen in the `web_hook_logs` use case above.*
The migrations and steps required for partitioning the `audit_events` table are similar to
the ones described in the previous sub-section for `web_hook_logs`. There is no retention
strategy defined for `audit_events` at the moment, so there is no pruning strategy
implemented over it, but we may implement an archiving solution in the future.
What's interesting in the case of `audit_events` is the discussion of the steps that we
had to follow to implement the UI/UX changes needed to
[encourage optimal querying of the partitioned table](https://gitlab.com/gitlab-org/gitlab/-/issues/223260).
It can be used as a starting point for the changes required at the application level
to align all access patterns with a specific time-decay-related access method.
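As a minimal illustration of what aligning access patterns means in practice: reads have to filter
on the partitioning key (`created_at`) so that PostgreSQL can prune all partitions that cannot
contain matching rows. The scope below is hypothetical and only shows the query shape, not existing
application code.
```ruby
class AuditEvent < ApplicationRecord
  # Hypothetical scope: `month` is any Date inside the target month.
  # Constraining the query to one month means the planner only scans the
  # partition(s) covering that created_at range.
  scope :created_in_month, ->(month) {
    from = month.beginning_of_month.beginning_of_day
    where(created_at: from...(from + 1.month))
  }
end

# Only the partition(s) covering July 2021 need to be scanned:
AuditEvent.created_in_month(Date.new(2021, 7)).order(created_at: :desc).limit(20)
```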
### CI tables
NOTE:
Requirements and analysis of the CI tables use case are still a work in progress. We intend
to add more details as the analysis moves forward.

View File

@ -23,7 +23,7 @@ To begin the activation process with your activation code:
1. Sign in to your GitLab self-managed instance.
1. From the top menu, select the Admin Area **{admin}**.
1. From the left sidebar, select **Subscriptions**.
1. From the left sidebar, select **Subscription**.
1. Paste the activation code onto the input field.
1. Read and accept the terms of service.
1. Select **Activate**.
@ -36,13 +36,13 @@ If you receive a license file from GitLab (for example a new trial), you can upl
The first time you visit your GitLab EE installation signed in as an administrator,
you should see a note urging you to upload a license with a link that takes you
to the **Subscriptions** area.
to the **Subscription** area.
Otherwise, to manually go to the **Subscriptions** area:
Otherwise, to manually go to the **Subscription** area:
1. Sign in to your GitLab self-managed instance.
1. From the top menu, select the Admin Area **{admin}**.
1. From the left sidebar, select **Subscriptions**, and select **Upload a license file**.
1. From the left sidebar, select **Subscription**, and select **Upload a license file**.
- *If you've received a `.gitlab-license` file:*
1. Download the license file to your local machine.
@ -116,7 +116,7 @@ before this occurs.
To remove a license file from a self-managed instance:
1. From the top menu, select the Admin Area **{admin}**.
1. From the left sidebar, select **Subscriptions**
1. From the left sidebar, select **Subscription**
1. Select **Remove license**.
These steps may need to be repeated to completely remove all licenses, including those applied in the past.

View File

@ -86,13 +86,12 @@ A blocked user:
- Cannot access Git repositories or the API.
- Does not receive any notifications from GitLab.
- Cannot use [slash commands](../../integration/slash_commands.md).
- Does not consume a [seat](../../subscriptions/self_managed/index.md#billable-users).
Personal projects, and group and user history of the blocked user are left intact.
Users can also be blocked using the [GitLab API](../../api/users.md#block-user).
NOTE:
A blocked user does not consume a [seat](../../subscriptions/self_managed/index.md#billable-users).
Users can also be blocked using the [GitLab API](../../api/users.md#block-user).
### Unblock a user
@ -105,11 +104,11 @@ A blocked user can be unblocked from the Admin Area. To do this:
1. Select the **{settings}** **User administration** dropdown.
1. Select **Unblock**.
Users can also be unblocked using the [GitLab API](../../api/users.md#unblock-user).
The user's state is set to active and they consume a
[seat](../../subscriptions/self_managed/index.md#billable-users).
NOTE:
Unblocking a user changes the user's state to active and consumes a
[seat](../../subscriptions/self_managed/index.md#billable-users).
Users can also be unblocked using the [GitLab API](../../api/users.md#unblock-user).
## Activate and deactivate users
@ -133,6 +132,7 @@ A deactivated user:
- Cannot access Git repositories or the API.
- Does not receive any notifications from GitLab.
- Cannot use [slash commands](../../integration/slash_commands.md).
- Does not consume a [seat](../../subscriptions/self_managed/index.md#billable-users).
Personal projects, and group and user history of the deactivated user are left intact.
@ -149,10 +149,8 @@ For the deactivation option to be visible to an admin, the user:
- Must be currently active.
- Must not have signed in, or have any activity, in the last 90 days.
Users can also be deactivated using the [GitLab API](../../api/users.md#deactivate-user).
NOTE:
A deactivated user does not consume a [seat](../../subscriptions/self_managed/index.md#billable-users).
Users can also be deactivated using the [GitLab API](../../api/users.md#deactivate-user).
### Automatically deactivate dormant users
@ -186,25 +184,19 @@ To do this:
1. Select the **{settings}** **User administration** dropdown.
1. Select **Activate**.
Users can also be activated using the [GitLab API](../../api/users.md#activate-user).
NOTE:
Activating a user changes the user's state to active and consumes a
The user's state is set to active and they consume a
[seat](../../subscriptions/self_managed/index.md#billable-users).
NOTE:
A deactivated user can also activate their account themselves by logging back in via the UI.
Users can also be activated using the [GitLab API](../../api/users.md#activate-user).
## Ban and unban users
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/327353) in GitLab 13.12.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/327353) in GitLab 14.2.
GitLab administrators can ban users.
NOTE:
This feature is behind a feature flag that is disabled by default. GitLab administrators
with access to the GitLab Rails console can [enable](../../administration/feature_flags.md)
this feature for your GitLab instance.
GitLab administrators can ban and unban users. The banned user's issues are still displayed. Hiding
a banned user's issues is a [work in progress](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66687).
### Ban a user
@ -218,9 +210,7 @@ Users can be banned using the Admin Area. To do this:
1. Select the **{settings}** **User administration** dropdown.
1. Select **Ban user**.
NOTE:
This feature is a work in progress. Currently, banning a user
only blocks them and does not hide their comments or issues.
The banned user does not consume a [seat](../../subscriptions/self_managed/index.md#billable-users).
### Unban a user
@ -233,6 +223,5 @@ A banned user can be unbanned using the Admin Area. To do this:
1. Select the **{settings}** **User administration** dropdown.
1. Select **Unban user**.
NOTE:
Unbanning a user changes the user's state to active and consumes a
The user's state is set to active and they consume a
[seat](../../subscriptions/self_managed/index.md#billable-users).

View File

@ -16,7 +16,10 @@ clients use, see the [Helm API documentation](../../../api/packages/helm.md).
## Build a Helm package
Creating a Helm package is documented [in the Helm documentation](https://helm.sh/docs/intro/using_helm/#creating-your-own-charts).
Read more in the Helm documentation about these topics:
- [Create your own Helm charts](https://helm.sh/docs/intro/using_helm/#creating-your-own-charts)
- [Package a Helm chart into a chart archive](https://helm.sh/docs/helm/helm_package/#helm-package)
## Authenticate to the Helm repository

View File

@ -23,7 +23,7 @@ module API
helpers do
def finder_params(current_user)
current_user.admin? ? { user: user(params[:user_id]) } : { user: current_user }
current_user.admin? ? { user: user(params[:user_id]) } : { user: current_user, impersonation: false }
end
def user(user_id)

View File

@ -92,6 +92,7 @@ module API
end
get do
todos = paginate(find_todos.with_entity_associations)
todos = ::Todos::AllowedTargetFilterService.new(todos, current_user).execute
options = { with: Entities::Todo, current_user: current_user }
batch_load_issuable_metadata(todos, options)

View File

@ -193,7 +193,10 @@ module Gitlab
def personal_access_token_check(password, project)
return unless password.present?
token = PersonalAccessTokensFinder.new(state: 'active').find_by_token(password)
finder_options = { state: 'active' }
finder_options[:impersonation] = false unless Gitlab.config.gitlab.impersonation_enabled
token = PersonalAccessTokensFinder.new(finder_options).find_by_token(password)
return unless token

View File

@ -26,13 +26,13 @@ module Gitlab
def branch_exists?
strong_memoize(:is_branch) do
project.repository.branch_exists?(ref)
branch_ref? && project.repository.branch_exists?(ref)
end
end
def tag_exists?
strong_memoize(:is_tag) do
project.repository.tag_exists?(ref)
tag_ref? && project.repository.tag_exists?(ref)
end
end
@ -106,6 +106,32 @@ module Gitlab
metrics.pipeline_failure_reason_counter
.increment(reason: (reason || :unknown_failure).to_s)
end
private
# Verifies that origin_ref is a fully qualified tag reference (refs/tags/<tag-name>)
#
# Falls back to `true` for backward compatibility reasons
# if origin_ref is a short ref
def tag_ref?
return true if full_git_ref_name_unavailable?
Gitlab::Git.tag_ref?(origin_ref).present?
end
# Verifies that origin_ref is a fully qualified branch reference (refs/heads/<branch-name>)
#
# Falls back to `true` for backward compatibility reasons
# if origin_ref is a short ref
def branch_ref?
return true if full_git_ref_name_unavailable?
Gitlab::Git.branch_ref?(origin_ref).present?
end
def full_git_ref_name_unavailable?
ref == origin_ref
end
end
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
module Gitlab
module Graphql
module TodosProjectPermissionPreloader
class FieldExtension < ::GraphQL::Schema::FieldExtension
def after_resolve(value:, memo:, **rest)
todos = value.to_a
Preloaders::UserMaxAccessLevelInProjectsPreloader.new(
todos.map(&:project).compact,
current_user(rest)
).execute
value
end
private
def current_user(options)
options.dig(:context, :current_user)
end
end
end
end
end

View File

@ -39,7 +39,7 @@ module Gitlab
def save_markdown(updates)
return unless persisted? && Gitlab::Database.read_write?
return if cached_markdown_version < cached_markdown_version_in_database
return if cached_markdown_version.to_i < cached_markdown_version_in_database.to_i
update_columns(updates)
end

View File

@ -1,13 +0,0 @@
# frozen_string_literal: true
require 'json'
module Gitlab
module OmniauthLogging
class JSONFormatter
def call(severity, datetime, progname, msg)
{ severity: severity, timestamp: datetime.utc.iso8601(3), pid: $$, progname: progname, message: msg }.to_json << "\n"
end
end
end
end

View File

@ -46,6 +46,7 @@ module Sidebars
def ci_cd_analytics_menu_item
if !context.project.feature_available?(:builds, context.current_user) ||
!can?(context.current_user, :read_build, context.project) ||
!can?(context.current_user, :read_ci_cd_analytics, context.project) ||
context.project.empty_repo?
return ::Sidebars::NilMenuItem.new(item_id: :ci_cd_analytics)
end

View File

@ -15320,6 +15320,9 @@ msgstr ""
msgid "Go full screen"
msgstr ""
msgid "Go to %{source_name}"
msgstr ""
msgid "Go to commits"
msgstr ""
@ -16452,9 +16455,6 @@ msgstr ""
msgid "How many users will be evaluating the trial?"
msgstr ""
msgid "However, you are already a member of this %{member_source}. Sign in using a different account to accept the invitation."
msgstr ""
msgid "I accept the %{terms_link}"
msgstr ""
@ -22622,9 +22622,6 @@ msgstr ""
msgid "Note that pushing to GitLab requires write access to this repository."
msgstr ""
msgid "Note that this invitation was sent to %{mail_to_invite_email}, but you are signed in as %{link_to_current_user} with email %{mail_to_current_user}."
msgstr ""
msgid "Note: As an administrator you may like to configure %{github_integration_link}, which will allow login via GitHub and allow connecting repositories without generating a Personal Access Token."
msgstr ""
@ -24723,9 +24720,6 @@ msgstr ""
msgid "Please enable and migrate to hashed storage to avoid security issues and ensure data integrity. %{migrate_link}"
msgstr ""
msgid "Please ensure your account's %{account_link_start}recovery settings%{account_link_end} are up to date."
msgstr ""
msgid "Please enter a non-negative number"
msgstr ""
@ -24873,6 +24867,9 @@ msgstr ""
msgid "Point to any links you like: documentation, built binaries, or other related materials. These can be internal or external links from your GitLab instance. Duplicate URLs are not allowed."
msgstr ""
msgid "Policies"
msgstr ""
msgid "Policy project doesn't exist"
msgstr ""
@ -25371,6 +25368,9 @@ msgstr ""
msgid "Profiles|Main settings"
msgstr ""
msgid "Profiles|Manage two-factor authentication"
msgstr ""
msgid "Profiles|No file chosen."
msgstr ""
@ -25476,6 +25476,9 @@ msgstr ""
msgid "Profiles|Using emojis in names seems fun, but please try to set a status message instead"
msgstr ""
msgid "Profiles|We recommend you ensure two-factor authentication is enabled and the settings are up to date."
msgstr ""
msgid "Profiles|What's your status?"
msgstr ""
@ -28820,9 +28823,6 @@ msgstr ""
msgid "Saving project."
msgstr ""
msgid "Scan Policies"
msgstr ""
msgid "Scanner"
msgstr ""
@ -30547,6 +30547,9 @@ msgstr ""
msgid "Sign in / Register"
msgstr ""
msgid "Sign in as a user with the matching email address, add the email to this account, or sign-up for a new account using the matching email."
msgstr ""
msgid "Sign in preview"
msgstr ""
@ -33811,6 +33814,9 @@ msgstr ""
msgid "This group, its subgroups and projects will be removed on %{date} since its parent group '%{parent_group_name}'' has been scheduled for removal."
msgstr ""
msgid "This invitation was sent to %{mail_to_invite_email}, but you are signed in as %{link_to_current_user} with email %{mail_to_current_user}."
msgstr ""
msgid "This is a \"Ghost User\", created to hold all issues authored by users that have since been deleted. This user cannot be removed."
msgstr ""
@ -37579,6 +37585,9 @@ msgstr ""
msgid "You are about to transfer the control of your account to %{group_name} group. This action is NOT reversible, you won't be able to access any of your groups and projects outside of %{group_name} once this transfer is complete."
msgstr ""
msgid "You are already a member of this %{member_source}."
msgstr ""
msgid "You are an admin, which means granting access to %{client_name} will allow them to interact with GitLab as an admin as well. Proceed with caution."
msgstr ""
@ -37903,7 +37912,7 @@ msgstr ""
msgid "You have been granted %{member_human_access} access to project %{name}."
msgstr ""
msgid "You have been invited"
msgid "You have been invited by %{link_to_inviter} to join %{source_name} %{strong_open}%{link_to_source}%{strong_close} as %{role}"
msgstr ""
msgid "You have been redirected to the only result; see the %{a_start}search results%{a_end} instead."
@ -38499,9 +38508,6 @@ msgstr ""
msgid "archived:"
msgstr ""
msgid "as %{role}."
msgstr ""
msgid "assign yourself"
msgstr ""
@ -38973,14 +38979,14 @@ msgstr ""
msgid "element is not a hierarchy"
msgstr ""
msgid "email '%{email}' does not match the allowed domain of %{email_domains}"
msgid_plural "email '%{email}' does not match the allowed domains: %{email_domains}"
msgstr[0] ""
msgstr[1] ""
msgid "email '%{email}' is not a verified email."
msgstr ""
msgid "email does not match the allowed domain of %{email_domains}"
msgid_plural "email does not match the allowed domains: %{email_domains}"
msgstr[0] ""
msgstr[1] ""
msgid "enabled"
msgstr ""
@ -39980,9 +39986,6 @@ msgstr ""
msgid "time summary"
msgstr ""
msgid "to join %{source_name}"
msgstr ""
msgid "toggle collapse"
msgstr ""

View File

@ -25,9 +25,64 @@ RSpec.describe InvitesController do
end
end
shared_examples 'invite email match enforcement' do |error_status:, flash_alert: nil|
it 'accepts user if invite email matches signed in user' do
expect do
request
end.to change { project_members.include?(user) }.from(false).to(true)
expect(response).to have_gitlab_http_status(:found)
expect(flash[:notice]).to include 'You have been granted'
end
it 'accepts invite if invite email matches confirmed secondary email' do
secondary_email = create(:email, :confirmed, user: user)
member.update!(invite_email: secondary_email.email)
expect do
request
end.to change { project_members.include?(user) }.from(false).to(true)
expect(response).to have_gitlab_http_status(:found)
expect(flash[:notice]).to include 'You have been granted'
end
it 'does not accept if invite email matches unconfirmed secondary email' do
secondary_email = create(:email, user: user)
member.update!(invite_email: secondary_email.email)
expect do
request
end.not_to change { project_members.include?(user) }
expect(response).to have_gitlab_http_status(error_status)
expect(flash[:alert]).to eq(flash_alert)
end
it 'does not accept if invite email does not match signed in user' do
member.update!(invite_email: 'bogus@email.com')
expect do
request
end.not_to change { project_members.include?(user) }
expect(response).to have_gitlab_http_status(error_status)
expect(flash[:alert]).to eq(flash_alert)
end
end
describe 'GET #show', :snowplow do
subject(:request) { get :show, params: params }
context 'when logged in' do
before do
sign_in(user)
end
it_behaves_like 'invite email match enforcement', error_status: :ok
it_behaves_like 'invalid token'
end
context 'when it is an initial invite email' do
let(:extra_params) { { invite_type: 'initial_email' } }
@ -69,34 +124,6 @@ RSpec.describe InvitesController do
end
end
context 'when logged in' do
before do
sign_in(user)
end
it 'accepts user if invite email matches signed in user' do
expect do
request
end.to change { project_members.include?(user) }.from(false).to(true)
expect(response).to have_gitlab_http_status(:found)
expect(flash[:notice]).to include 'You have been granted'
end
it 'forces re-confirmation if email does not match signed in user' do
member.update!(invite_email: 'bogus@email.com')
expect do
request
end.not_to change { project_members.include?(user) }
expect(response).to have_gitlab_http_status(:ok)
expect(flash[:notice]).to be_nil
end
it_behaves_like 'invalid token'
end
context 'when not logged in' do
context 'when invite token belongs to a valid member' do
context 'when instance allows sign up' do
@ -223,6 +250,7 @@ RSpec.describe InvitesController do
subject(:request) { post :accept, params: params }
it_behaves_like 'invite email match enforcement', error_status: :redirect, flash_alert: 'The invitation could not be accepted.'
it_behaves_like 'invalid token'
end

View File

@ -302,35 +302,46 @@ RSpec.describe Projects::PipelinesController do
end
describe 'GET #show' do
render_views
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
subject { get_pipeline_html }
def get_pipeline_html
get :show, params: { namespace_id: project.namespace, project_id: project, id: pipeline }, format: :html
end
def create_build_with_artifacts(stage, stage_idx, name)
create(:ci_build, :artifacts, :tags, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name)
context 'when the project is public' do
render_views
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
def create_build_with_artifacts(stage, stage_idx, name)
create(:ci_build, :artifacts, :tags, pipeline: pipeline, stage: stage, stage_idx: stage_idx, name: name)
end
before do
create_build_with_artifacts('build', 0, 'job1')
create_build_with_artifacts('build', 0, 'job2')
end
it 'avoids N+1 database queries', :request_store do
control_count = ActiveRecord::QueryRecorder.new { get_pipeline_html }.count
expect(response).to have_gitlab_http_status(:ok)
create_build_with_artifacts('build', 0, 'job3')
expect { get_pipeline_html }.not_to exceed_query_limit(control_count)
expect(response).to have_gitlab_http_status(:ok)
end
end
before do
create_build_with_artifacts('build', 0, 'job1')
create_build_with_artifacts('build', 0, 'job2')
end
context 'when the project is private' do
let(:project) { create(:project, :private, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project) }
it 'avoids N+1 database queries', :request_store do
get_pipeline_html
it 'returns `not_found` when the user does not have access' do
sign_in(create(:user))
control_count = ActiveRecord::QueryRecorder.new { get_pipeline_html }.count
expect(response).to have_gitlab_http_status(:ok)
get_pipeline_html
create_build_with_artifacts('build', 0, 'job3')
expect { get_pipeline_html }.not_to exceed_query_limit(control_count)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end

View File

@ -83,4 +83,16 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js do
expect(no_personal_access_tokens_message).to have_text("This user has no active impersonation tokens.")
end
end
describe "impersonation disabled state" do
before do
stub_config_setting(impersonation_enabled: false)
end
it "does not show impersonation tokens tab" do
visit admin_user_path(user)
expect(page).not_to have_content("Impersonation Tokens")
end
end
end

View File

@ -3,16 +3,20 @@
require 'spec_helper'
RSpec.describe 'Dashboard > Todo target states' do
let(:user) { create(:user) }
let(:author) { create(:user) }
let(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :public) }
before_all do
project.add_developer(user)
end
before do
sign_in(user)
end
it 'on a closed issue todo has closed label' do
issue_closed = create(:issue, state: 'closed')
issue_closed = create(:issue, state: 'closed', project: project)
create_todo issue_closed
visit dashboard_todos_path
@ -22,7 +26,7 @@ RSpec.describe 'Dashboard > Todo target states' do
end
it 'on an open issue todo does not have an open label' do
issue_open = create(:issue)
issue_open = create(:issue, project: project)
create_todo issue_open
visit dashboard_todos_path
@ -32,7 +36,7 @@ RSpec.describe 'Dashboard > Todo target states' do
end
it 'on a merged merge request todo has merged label' do
mr_merged = create(:merge_request, :simple, :merged, author: user)
mr_merged = create(:merge_request, :simple, :merged, author: user, source_project: project)
create_todo mr_merged
visit dashboard_todos_path
@ -42,7 +46,7 @@ RSpec.describe 'Dashboard > Todo target states' do
end
it 'on a closed merge request todo has closed label' do
mr_closed = create(:merge_request, :simple, :closed, author: user)
mr_closed = create(:merge_request, :simple, :closed, author: user, source_project: project)
create_todo mr_closed
visit dashboard_todos_path
@ -52,7 +56,7 @@ RSpec.describe 'Dashboard > Todo target states' do
end
it 'on an open merge request todo does not have an open label' do
mr_open = create(:merge_request, :simple, author: user)
mr_open = create(:merge_request, :simple, author: user, source_project: project)
create_todo mr_open
visit dashboard_todos_path

View File

@ -128,7 +128,7 @@ RSpec.describe 'Dashboard > User filters todos', :js do
describe 'filter by action' do
before do
create(:todo, :build_failed, user: user_1, author: user_2, project: project_1)
create(:todo, :build_failed, user: user_1, author: user_2, project: project_1, target: merge_request)
create(:todo, :marked, user: user_1, author: user_2, project: project_1, target: issue1)
create(:todo, :review_requested, user: user_1, author: user_2, project: project_1, target: issue1)
end

View File

@ -3,10 +3,16 @@
require 'spec_helper'
RSpec.describe 'Dashboard Todos' do
include DesignManagementTestHelpers
let_it_be(:user) { create(:user, username: 'john') }
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, due_date: Date.today, title: "Fix bug") }
let_it_be(:issue) { create(:issue, project: project, due_date: Date.today, title: "Fix bug") }
before_all do
project.add_developer(user)
end
context 'User does not have todos' do
before do
@ -21,8 +27,8 @@ RSpec.describe 'Dashboard Todos' do
context 'when the todo references a merge request' do
let(:referenced_mr) { create(:merge_request, source_project: project) }
let(:note) { create(:note, project: project, note: "Check out #{referenced_mr.to_reference}") }
let!(:todo) { create(:todo, :mentioned, user: user, project: project, author: author, note: note) }
let(:note) { create(:note, project: project, note: "Check out #{referenced_mr.to_reference}", noteable: create(:issue, project: project)) }
let!(:todo) { create(:todo, :mentioned, user: user, project: project, author: author, note: note, target: note.noteable) }
before do
sign_in(user)
@ -39,9 +45,26 @@ RSpec.describe 'Dashboard Todos' do
end
end
context 'User has a todo', :js do
context 'user has an unauthorized todo' do
before do
sign_in(user)
end
it 'does not render the todo' do
unauthorized_issue = create(:issue)
create(:todo, :mentioned, user: user, project: unauthorized_issue.project, target: unauthorized_issue, author: author)
create(:todo, :mentioned, user: user, project: project, target: issue, author: author)
visit dashboard_todos_path
expect(page).to have_selector('.todos-list .todo', count: 1)
end
end
context 'User has a todo', :js do
let_it_be(:user_todo) { create(:todo, :mentioned, user: user, project: project, target: issue, author: author) }
before do
sign_in(user)
visit dashboard_todos_path
@ -183,7 +206,7 @@ RSpec.describe 'Dashboard Todos' do
end
context 'approval todo' do
let(:merge_request) { create(:merge_request, title: "Fixes issue") }
let(:merge_request) { create(:merge_request, title: "Fixes issue", source_project: project) }
before do
create(:todo, :approval_required, user: user, project: project, target: merge_request, author: user)
@ -199,7 +222,7 @@ RSpec.describe 'Dashboard Todos' do
end
context 'review request todo' do
let(:merge_request) { create(:merge_request, title: "Fixes issue") }
let(:merge_request) { create(:merge_request, title: "Fixes issue", source_project: project) }
before do
create(:todo, :review_requested, user: user, project: project, target: merge_request, author: user)
@ -355,7 +378,7 @@ RSpec.describe 'Dashboard Todos' do
end
context 'User has a Build Failed todo' do
let!(:todo) { create(:todo, :build_failed, user: user, project: project, author: author) }
let!(:todo) { create(:todo, :build_failed, user: user, project: project, author: author, target: create(:merge_request, source_project: project)) }
before do
sign_in(user)
@ -386,6 +409,7 @@ RSpec.describe 'Dashboard Todos' do
end
before do
enable_design_management
project.add_developer(user)
sign_in(user)

View File

@ -90,48 +90,17 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
context 'when signed in and an invite link is clicked' do
context 'when an invite email is a secondary email for the user' do
let(:invite_email) { 'user_secondary@example.com' }
before do
sign_in(user)
visit invite_path(group_invite.raw_invite_token)
end
it 'sends user to the invite url and allows them to decline' do
expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
expect(page).to have_content("Note that this invitation was sent to #{invite_email}")
expect(page).to have_content("but you are signed in as #{user.to_reference} with email #{user.email}")
click_link('Decline')
expect(page).to have_content('You have declined the invitation')
expect(current_path).to eq(dashboard_projects_path)
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
it 'sends user to the invite url and allows them to accept' do
expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
expect(page).to have_content("Note that this invitation was sent to #{invite_email}")
expect(page).to have_content("but you are signed in as #{user.to_reference} with email #{user.email}")
click_link('Accept invitation')
expect(page).to have_content('You have been granted')
expect(current_path).to eq(activity_group_path(group))
end
end
context 'when user is an existing member' do
before do
sign_in(owner)
group.add_developer(user)
sign_in(user)
visit invite_path(group_invite.raw_invite_token)
end
it 'shows message user already a member' do
expect(current_path).to eq(invite_path(group_invite.raw_invite_token))
expect(page).to have_link(owner.name, href: user_url(owner))
expect(page).to have_content('However, you are already a member of this group.')
expect(page).to have_link(user.name, href: user_path(user))
expect(page).to have_content('You are already a member of this group.')
end
end
end

View File

@ -260,8 +260,6 @@ RSpec.describe 'Mermaid rendering', :js do
description *= 51
project = create(:project, :public)
wiki_page = build(:wiki_page, { container: project, content: description })
wiki_page.create message: 'mermaid test commit' # rubocop:disable Rails/SaveBang
wiki_page = project.wiki.find_page(wiki_page.slug)
@ -277,6 +275,27 @@ RSpec.describe 'Mermaid rendering', :js do
expect(page).not_to have_selector('.js-lazy-render-mermaid-container')
end
end
it 'does not allow HTML injection' do
description = <<~MERMAID
```mermaid
%%{init: {"flowchart": {"htmlLabels": "false"}} }%%
flowchart
A["<iframe></iframe>"]
```
MERMAID
issue = create(:issue, project: project, description: description)
visit project_issue_path(project, issue)
wait_for_requests
wait_for_mermaid
page.within('.description') do
expect(page).not_to have_xpath("//iframe")
end
end
end
def wait_for_mermaid

View File

@ -365,9 +365,8 @@ RSpec.describe 'Pipeline', :js do
let(:project) { create(:project, :public, :repository, public_builds: false) }
let(:role) { :guest }
it 'does not show failed jobs tab pane' do
expect(page).to have_link('Pipeline')
expect(page).not_to have_content('Failed Jobs')
it 'does not show the pipeline details page' do
expect(page).to have_content('Not Found')
end
end
end

View File

@ -44,8 +44,7 @@ describe('Vuex members mutations', () => {
describe('when error has a message', () => {
it('shows error message', () => {
const error = new Error('Request failed with status code 422');
const message =
'User email "john.smith@gmail.com" does not match the allowed domain of example.com';
const message = 'User email does not match the allowed domain of example.com';
error.response = {
data: { message },
@ -88,8 +87,7 @@ describe('Vuex members mutations', () => {
describe('when error has a message', () => {
it('shows error message', () => {
const error = new Error('Request failed with status code 422');
const message =
'User email "john.smith@gmail.com" does not match the allowed domain of example.com';
const message = 'User email does not match the allowed domain of example.com';
error.response = {
data: { message },

View File

@ -105,7 +105,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
describe 'with a single permission' do
let(:type) do
type_factory do |type|
type.field :name, GraphQL::STRING_TYPE, null: true, authorize: permission_single
type.field :name, GraphQL::Types::String, null: true, authorize: permission_single
end
end
@ -124,7 +124,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
let(:type) do
permissions = permission_collection
type_factory do |type|
type.field :name, GraphQL::STRING_TYPE,
type.field :name, GraphQL::Types::String,
null: true,
authorize: permissions
end
@ -332,7 +332,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
type_factory do |type|
type.graphql_name 'FakeIssueType'
type.authorize :read_issue
type.field :id, GraphQL::ID_TYPE, null: false
type.field :id, GraphQL::Types::ID, null: false
end
end

View File

@ -15,7 +15,7 @@ RSpec.describe ::Mutations::BaseMutation do
context 'when argument is nullable and required' do
let(:mutation_class) do
Class.new(described_class) do
argument :foo, GraphQL::STRING_TYPE, required: :nullable
argument :foo, GraphQL::Types::String, required: :nullable
end
end
@ -35,7 +35,7 @@ RSpec.describe ::Mutations::BaseMutation do
context 'when argument is required and NOT nullable' do
let(:mutation_class) do
Class.new(described_class) do
argument :foo, GraphQL::STRING_TYPE, required: true
argument :foo, GraphQL::Types::String, required: true
end
end

View File

@ -5,17 +5,23 @@ require 'spec_helper'
RSpec.describe Mutations::Todos::MarkDone do
include GraphqlHelpers
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :pending) }
let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :done) }
let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :pending, target: issue) }
let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :done, target: issue) }
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :pending) }
let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
before_all do
project.add_developer(current_user)
end
specify { expect(described_class).to require_graphql_authorizations(:update_todo) }
describe '#resolve' do

View File

@ -5,17 +5,23 @@ require 'spec_helper'
RSpec.describe Mutations::Todos::Restore do
include GraphqlHelpers
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :done) }
let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :pending) }
let_it_be(:todo1) { create(:todo, user: current_user, author: author, state: :done, target: issue) }
let_it_be(:todo2) { create(:todo, user: current_user, author: author, state: :pending, target: issue) }
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :done) }
let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
before_all do
project.add_developer(current_user)
end
specify { expect(described_class).to require_graphql_authorizations(:update_todo) }
describe '#resolve' do

View File

@ -5,14 +5,24 @@ require 'spec_helper'
RSpec.describe Resolvers::ProjectPipelineStatisticsResolver do
include GraphqlHelpers
let_it_be(:project) { create(:project) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:guest) { create(:user) }
let_it_be(:reporter) { create(:user) }
let(:current_user) { reporter }
before_all do
project.add_guest(guest)
project.add_reporter(reporter)
end
specify do
expect(described_class).to have_nullable_graphql_type(::Types::Ci::AnalyticsType)
end
def resolve_statistics(project, args)
resolve(described_class, obj: project, args: args)
ctx = { current_user: current_user }
resolve(described_class, obj: project, args: args, ctx: ctx)
end
describe '#resolve' do
@ -32,5 +42,15 @@ RSpec.describe Resolvers::ProjectPipelineStatisticsResolver do
:pipeline_times_values
)
end
context 'when the user does not have access to the CI/CD analytics data' do
let(:current_user) { guest }
it 'returns nil' do
result = resolve_statistics(project, {})
expect(result).to be_nil
end
end
end
end

View File

@ -4,19 +4,28 @@ require 'spec_helper'
RSpec.describe Resolvers::TodoResolver do
include GraphqlHelpers
include DesignManagementTestHelpers
specify do
expect(described_class).to have_nullable_graphql_type(Types::TodoType.connection_type)
end
describe '#resolve' do
let_it_be(:project) { create(:project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:author1) { create(:user) }
let_it_be(:author2) { create(:user) }
let_it_be(:merge_request_todo_pending) { create(:todo, user: current_user, target_type: 'MergeRequest', state: :pending, action: Todo::MENTIONED, author: author1) }
let_it_be(:issue_todo_done) { create(:todo, user: current_user, state: :done, action: Todo::ASSIGNED, author: author2) }
let_it_be(:issue_todo_pending) { create(:todo, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1) }
let_it_be(:issue_todo_done) { create(:todo, user: current_user, state: :done, action: Todo::ASSIGNED, author: author2, target: issue) }
let_it_be(:issue_todo_pending) { create(:todo, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1, target: issue) }
let(:merge_request) { create(:merge_request, source_project: project) }
let!(:merge_request_todo_pending) { create(:todo, user: current_user, target: merge_request, state: :pending, action: Todo::MENTIONED, author: author1) }
before_all do
project.add_developer(current_user)
end
it 'calls TodosFinder' do
expect_next_instance_of(TodosFinder) do |finder|
@ -40,7 +49,9 @@ RSpec.describe Resolvers::TodoResolver do
end
it 'returns the todos for multiple filters' do
design_todo_pending = create(:todo, target_type: 'DesignManagement::Design', user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1)
enable_design_management
design = create(:design, issue: issue)
design_todo_pending = create(:todo, target: design, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1)
todos = resolve_todos(type: ['MergeRequest', 'DesignManagement::Design'])
@ -59,11 +70,15 @@ RSpec.describe Resolvers::TodoResolver do
group3 = create(:group)
group1.add_developer(current_user)
issue1 = create(:issue, project: create(:project, group: group1))
group2.add_developer(current_user)
issue2 = create(:issue, project: create(:project, group: group2))
group3.add_developer(current_user)
issue3 = create(:issue, project: create(:project, group: group3))
todo4 = create(:todo, group: group1, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1)
todo5 = create(:todo, group: group2, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1)
create(:todo, group: group3, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1)
todo4 = create(:todo, group: group1, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1, target: issue1)
todo5 = create(:todo, group: group2, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1, target: issue2)
create(:todo, group: group3, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1, target: issue3)
todos = resolve_todos(group_id: [group2.id, group1.id])
@ -93,9 +108,13 @@ RSpec.describe Resolvers::TodoResolver do
project2 = create(:project)
project3 = create(:project)
todo4 = create(:todo, project: project1, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1)
todo5 = create(:todo, project: project2, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1)
create(:todo, project: project3, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1)
project1.add_developer(current_user)
project2.add_developer(current_user)
project3.add_developer(current_user)
todo4 = create(:todo, project: project1, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1, target: create(:issue, project: project1))
todo5 = create(:todo, project: project2, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1, target: create(:issue, project: project2))
create(:todo, project: project3, user: current_user, state: :pending, action: Todo::ASSIGNED, author: author1, target: create(:issue, project: project3))
todos = resolve_todos(project_id: [project2.id, project1.id])

View File

@ -40,7 +40,7 @@ RSpec.describe Subscriptions::IssuableUpdated do
end
end
context 'when a GraphQL::ID_TYPE is provided' do
context 'when a GraphQL::Types::ID is provided' do
let(:issuable_id) { issue.to_gid.to_s }
it 'raises an exception' do

View File

@ -336,6 +336,15 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
expect_results_with_abilities(impersonation_token, described_class.full_authentication_abilities)
end
it 'fails if it is an impersonation token but impersonation is blocked' do
stub_config_setting(impersonation_enabled: false)
impersonation_token = create(:personal_access_token, :impersonation, scopes: ['api'])
expect(gl_auth.find_for_git_client('', impersonation_token.token, project: nil, ip: 'ip'))
.to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
end
it 'limits abilities based on scope' do
personal_access_token = create(:personal_access_token, scopes: %w[read_user sudo])

View File

@ -136,7 +136,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
source: :push,
origin_ref: 'mytag',
origin_ref: origin_ref,
checkout_sha: project.commit.id,
after_sha: nil,
before_sha: nil,
@ -147,6 +147,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
current_user: user)
end
let(:origin_ref) { 'mytag' }
before do
allow_any_instance_of(Repository).to receive(:tag_exists?).with('mytag').and_return(true)
@ -156,6 +158,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Build do
it 'correctly indicated that this is a tagged pipeline' do
expect(pipeline).to be_tag
end
context 'when origin_ref is branch but tag ref with the same name exists' do
let(:origin_ref) { 'refs/heads/mytag' }
it 'correctly indicated that a pipeline is not tagged' do
expect(pipeline).not_to be_tag
end
end
end
context 'when pipeline is running for a merge request' do

View File

@ -27,6 +27,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
it { is_expected.to eq(true) }
end
context 'for fully described tag ref' do
let(:origin_ref) { 'refs/tags/master' }
it { is_expected.to eq(false) }
end
context 'for fully described branch ref' do
let(:origin_ref) { 'refs/heads/master' }
it { is_expected.to eq(true) }
end
context 'for invalid branch' do
let(:origin_ref) { 'something' }
@ -43,6 +55,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
it { is_expected.to eq(true) }
end
context 'for fully described tag ref' do
let(:origin_ref) { 'refs/tags/v1.0.0' }
it { is_expected.to eq(true) }
end
context 'for fully described branch ref' do
let(:origin_ref) { 'refs/heads/v1.0.0' }
it { is_expected.to eq(false) }
end
context 'for invalid ref' do
let(:origin_ref) { 'something' }

View File

@ -138,6 +138,25 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
expect(issue.assignees).to be_empty
expect(issue.milestone).to be_nil
end
context 'when issues are set to private' do
before do
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
end
it 'applies quick action commands present on templates' do
file_content = %(Text from service_desk2 template \n/label ~#{label.title} \n/milestone %"#{milestone.name}")
set_template_file('service_desk2', file_content)
receiver.execute
issue = Issue.last
expect(issue.description).to include('Text from service_desk2 template')
expect(issue.label_ids).to include(label.id)
expect(issue.author_id).to eq(User.support_bot.id)
expect(issue.milestone).to eq(milestone)
end
end
end
end

View File

@ -7,7 +7,7 @@ RSpec.describe Gitlab::Graphql::CallsGitaly::FieldExtension, :request_store do
let(:field_args) { {} }
let(:owner) { fresh_object_type }
let(:field) do
::Types::BaseField.new(name: 'value', type: GraphQL::STRING_TYPE, null: true, owner: owner, **field_args)
::Types::BaseField.new(name: 'value', type: GraphQL::Types::String, null: true, owner: owner, **field_args)
end
def resolve_value

View File

@ -10,7 +10,7 @@ RSpec.describe Gitlab::Graphql::CopyFieldDescription do
Class.new(Types::BaseObject) do
graphql_name "TestType"
field :field_name, GraphQL::STRING_TYPE, null: true, description: 'Foo'
field :field_name, GraphQL::Types::String, null: true, description: 'Foo'
end
end

View File

@ -6,8 +6,8 @@ RSpec.describe Gitlab::Graphql::MountMutation do
Class.new(Mutations::BaseMutation) do
graphql_name 'TestMutation'
argument :foo, GraphQL::STRING_TYPE, required: false
field :bar, GraphQL::STRING_TYPE, null: true
argument :foo, GraphQL::Types::String, required: false
field :bar, GraphQL::Types::String, null: true
end
end

View File

@ -19,7 +19,7 @@ RSpec.describe Gitlab::Graphql::NegatableArguments do
it 'defines any arguments passed as block' do
test_resolver.negated do
argument :foo, GraphQL::STRING_TYPE, required: false
argument :foo, GraphQL::Types::String, required: false
end
expect(test_resolver.arguments['not'].type.arguments.keys).to match_array(['foo'])
@ -27,10 +27,10 @@ RSpec.describe Gitlab::Graphql::NegatableArguments do
it 'defines all arguments passed as block even if called multiple times' do
test_resolver.negated do
argument :foo, GraphQL::STRING_TYPE, required: false
argument :foo, GraphQL::Types::String, required: false
end
test_resolver.negated do
argument :bar, GraphQL::STRING_TYPE, required: false
argument :bar, GraphQL::Types::String, required: false
end
expect(test_resolver.arguments['not'].type.arguments.keys).to match_array(%w[foo bar])

View File

@ -33,7 +33,7 @@ RSpec.describe ::Gitlab::Graphql::Pagination::Connections do
let(:node_type) do
Class.new(::GraphQL::Schema::Object) do
graphql_name 'Node'
field :value, GraphQL::INT_TYPE, null: false
field :value, GraphQL::Types::Int, null: false
end
end

View File

@ -10,7 +10,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
let(:owner) { fresh_object_type }
let(:field_name) { 'value' }
let(:field) do
::Types::BaseField.new(name: field_name, type: GraphQL::STRING_TYPE, null: true, owner: owner)
::Types::BaseField.new(name: field_name, type: GraphQL::Types::String, null: true, owner: owner)
end
let(:base_presenter) do
@ -38,7 +38,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
Module.new do
include ::Types::BaseInterface
field :interface_field, GraphQL::STRING_TYPE, null: true
field :interface_field, GraphQL::Types::String, null: true
end
end
@ -58,7 +58,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
end
it 'resolves the interface field using the implementation from the presenter' do
field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::Types::String, null: true, owner: interface)
value = resolve_field(field, object, object_type: implementation)
expect(value).to eq 'made of concrete'
@ -67,7 +67,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
context 'when the implementation is inherited' do
it 'resolves the interface field using the implementation from the presenter' do
subclass = Class.new(implementation) { graphql_name 'Subclass' }
field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::STRING_TYPE, null: true, owner: interface)
field = ::Types::BaseField.new(name: :interface_field, type: GraphQL::Types::String, null: true, owner: interface)
value = resolve_field(field, object, object_type: subclass)
expect(value).to eq 'made of concrete'
@ -79,8 +79,8 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
def parent
type = fresh_object_type('Parent')
type.present_using(provide_foo)
type.field :foo, ::GraphQL::INT_TYPE, null: true
type.field :value, ::GraphQL::STRING_TYPE, null: true
type.field :foo, ::GraphQL::Types::Int, null: true
type.field :value, ::GraphQL::Types::String, null: true
type
end
@ -88,7 +88,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
type = Class.new(parent)
type.graphql_name 'Child'
type.present_using(provide_bar)
type.field :bar, ::GraphQL::INT_TYPE, null: true
type.field :bar, ::GraphQL::Types::Int, null: true
type
end
@ -150,7 +150,7 @@ RSpec.describe Gitlab::Graphql::Present::FieldExtension do
let(:field) do
::Types::BaseField.new(
name: field_name,
type: GraphQL::STRING_TYPE,
type: GraphQL::Types::String,
null: true,
owner: owner,
resolve: ->(obj, args, ctx) { 'Hello from a proc' }

View File

@ -21,30 +21,30 @@ RSpec.describe Gitlab::Graphql::Queries do
let_it_be(:schema) do
author = Class.new(GraphQL::Schema::Object) do
graphql_name 'Author'
field :name, GraphQL::STRING_TYPE, null: true
field :handle, GraphQL::STRING_TYPE, null: false
field :verified, GraphQL::BOOLEAN_TYPE, null: false
field :name, GraphQL::Types::String, null: true
field :handle, GraphQL::Types::String, null: false
field :verified, GraphQL::Types::Boolean, null: false
end
post = Class.new(GraphQL::Schema::Object) do
graphql_name 'Post'
field :name, GraphQL::STRING_TYPE, null: false
field :title, GraphQL::STRING_TYPE, null: false
field :content, GraphQL::STRING_TYPE, null: true
field :name, GraphQL::Types::String, null: false
field :title, GraphQL::Types::String, null: false
field :content, GraphQL::Types::String, null: true
field :author, author, null: false
end
author.field :posts, [post], null: false do
argument :blog_title, GraphQL::STRING_TYPE, required: false
argument :blog_title, GraphQL::Types::String, required: false
end
blog = Class.new(GraphQL::Schema::Object) do
graphql_name 'Blog'
field :title, GraphQL::STRING_TYPE, null: false
field :description, GraphQL::STRING_TYPE, null: false
field :title, GraphQL::Types::String, null: false
field :description, GraphQL::Types::String, null: false
field :main_author, author, null: false
field :posts, [post], null: false
field :post, post, null: true do
argument :slug, GraphQL::STRING_TYPE, required: true
argument :slug, GraphQL::Types::String, required: true
end
end
@ -52,10 +52,10 @@ RSpec.describe Gitlab::Graphql::Queries do
query(Class.new(GraphQL::Schema::Object) do
graphql_name 'Query'
field :blog, blog, null: true do
argument :title, GraphQL::STRING_TYPE, required: true
argument :title, GraphQL::Types::String, required: true
end
field :post, post, null: true do
argument :slug, GraphQL::STRING_TYPE, required: true
argument :slug, GraphQL::Types::String, required: true
end
end)
end

View File

@ -228,4 +228,16 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
thing.refresh_markdown_cache!
end
end
context 'when persisted cache is nil' do
before do
thing.update_column(:cached_markdown_version, nil)
end
it 'does not save the generated HTML' do
expect(thing).to receive(:update_columns)
thing.refresh_markdown_cache!
end
end
end

View File

@ -1,12 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::OmniauthLogging::JSONFormatter do
it "generates log in json format" do
Timecop.freeze(Time.utc(2019, 12, 04, 9, 10, 11, 123456)) do
expect(subject.call(:info, Time.now, 'omniauth', 'log message'))
.to eq %Q({"severity":"info","timestamp":"2019-12-04T09:10:11.123Z","pid":#{Process.pid},"progname":"omniauth","message":"log message"}\n)
end
end
end

View File

@ -4,15 +4,19 @@ require 'spec_helper'
RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:guest) do
create(:user).tap { |u| project.add_guest(u) }
end
let(:user) { project.owner }
let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project, current_ref: project.repository.root_ref) }
let(:owner) { project.owner }
let(:current_user) { owner }
let(:context) { Sidebars::Projects::Context.new(current_user: current_user, container: project, current_ref: project.repository.root_ref) }
subject { described_class.new(context) }
describe '#render?' do
context 'when user cannot read analytics' do
let(:user) { nil }
let(:current_user) { nil }
it 'returns false' do
expect(subject.render?).to be false
@ -79,7 +83,7 @@ RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
end
describe 'when the user does not have access' do
let(:user) { nil }
let(:current_user) { guest }
specify { is_expected.to be_nil }
end
@ -99,7 +103,7 @@ RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
end
describe 'when the user does not have access' do
let(:user) { nil }
let(:current_user) { nil }
specify { is_expected.to be_nil }
end
@ -111,7 +115,7 @@ RSpec.describe Sidebars::Projects::Menus::AnalyticsMenu do
specify { is_expected.not_to be_nil }
describe 'when the user does not have access' do
let(:user) { nil }
let(:current_user) { nil }
specify { is_expected.to be_nil }
end

View File

@ -552,4 +552,10 @@ RSpec.describe DiffNote do
expect(subject.on_image?).to be_truthy
end
end
describe '#to_ability_name' do
subject { described_class.new.to_ability_name }
it { is_expected.to eq('note') }
end
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe DiscussionNote do
describe '#to_ability_name' do
subject { described_class.new.to_ability_name }
it { is_expected.to eq('note') }
end
end

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe LegacyDiffNote do
describe '#to_ability_name' do
subject { described_class.new.to_ability_name }
it { is_expected.to eq('note') }
end
end

View File

@ -2421,39 +2421,20 @@ RSpec.describe Project, factory_default: :keep do
let_it_be_with_reload(:project) { create(:project) }
it 'updates project_feature', :aggregate_failures do
# Simulate an existing project that has container_registry enabled
project.update_column(:container_registry_enabled, true)
project.project_feature.update_column(:container_registry_access_level, ProjectFeature::ENABLED)
project.update!(container_registry_enabled: false)
expect(project.read_attribute(:container_registry_enabled)).to eq(false)
expect(project.project_feature.container_registry_access_level).to eq(ProjectFeature::DISABLED)
project.update!(container_registry_enabled: true)
expect(project.read_attribute(:container_registry_enabled)).to eq(true)
expect(project.project_feature.container_registry_access_level).to eq(ProjectFeature::ENABLED)
end
it 'rolls back both projects and project_features rows in case of error', :aggregate_failures do
project.update_column(:container_registry_enabled, true)
project.project_feature.update_column(:container_registry_access_level, ProjectFeature::ENABLED)
allow(project).to receive(:valid?).and_return(false)
expect { project.update!(container_registry_enabled: false) }.to raise_error(ActiveRecord::RecordInvalid)
expect(project.reload.read_attribute(:container_registry_enabled)).to eq(true)
expect(project.project_feature.reload.container_registry_access_level).to eq(ProjectFeature::ENABLED)
end
end
describe '#container_registry_enabled' do
let_it_be_with_reload(:project) { create(:project) }
it 'delegates to project_feature', :aggregate_failures do
project.update_column(:container_registry_enabled, true)
project.project_feature.update_column(:container_registry_access_level, ProjectFeature::DISABLED)
expect(project.container_registry_enabled).to eq(false)

View File

@ -0,0 +1,11 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe SyntheticNote do
describe '#to_ability_name' do
subject { described_class.new.to_ability_name }
it { is_expected.to eq('note') }
end
end

View File

@ -11,13 +11,37 @@ RSpec.describe IssuePolicy do
let(:reporter) { create(:user) }
let(:group) { create(:group, :public) }
let(:reporter_from_group_link) { create(:user) }
let(:non_member) { create(:user) }
let(:support_bot) { User.support_bot }
def permissions(user, issue)
described_class.new(user, issue)
end
shared_examples 'support bot with service desk enabled' do
before do
allow(::Gitlab::IncomingEmail).to receive(:enabled?) { true }
allow(::Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true }
project.update!(service_desk_enabled: true)
end
it 'allows support_bot to read issues, create and set metadata on new issues' do
expect(permissions(support_bot, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(support_bot, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(support_bot, new_issue)).to be_allowed(:create_issue, :set_issue_metadata)
end
end
shared_examples 'support bot with service desk disabled' do
it 'does not allow support_bot to read issues, create and set metadata on new issues' do
expect(permissions(support_bot, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(support_bot, issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(support_bot, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata)
end
end
context 'a private project' do
let(:non_member) { create(:user) }
let(:project) { create(:project, :private) }
let(:issue) { create(:issue, project: project, assignees: [assignee], author: author) }
let(:issue_no_assignee) { create(:issue, project: project) }
@ -34,12 +58,6 @@ RSpec.describe IssuePolicy do
create(:project_group_link, group: group, project: project)
end
it 'does not allow non-members to read issues' do
expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(non_member, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata)
end
it 'allows guests to read issues' do
expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid)
expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata)
@ -82,6 +100,15 @@ RSpec.describe IssuePolicy do
expect(permissions(assignee, new_issue)).to be_allowed(:create_issue, :set_issue_metadata)
end
it 'does not allow non-members to read, update or create issues' do
expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(non_member, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata)
end
it_behaves_like 'support bot with service desk disabled'
it_behaves_like 'support bot with service desk enabled'
context 'with confidential issues' do
let(:confidential_issue) { create(:issue, :confidential, project: project, assignees: [assignee], author: author) }
let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) }
@ -196,7 +223,8 @@ RSpec.describe IssuePolicy do
expect(permissions(author, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
expect(permissions(author, issue_locked)).to be_disallowed(:admin_issue, :reopen_issue, :set_issue_metadata)
expect(permissions(author, new_issue)).to be_allowed(:create_issue, :set_issue_metadata)
expect(permissions(author, new_issue)).to be_allowed(:create_issue)
expect(permissions(author, new_issue)).to be_disallowed(:set_issue_metadata)
end
it 'allows issue assignees to read, reopen and update their issues' do
@ -208,14 +236,44 @@ RSpec.describe IssuePolicy do
expect(permissions(assignee, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
expect(permissions(assignee, issue_locked)).to be_disallowed(:admin_issue, :reopen_issue, :set_issue_metadata)
expect(permissions(author, new_issue)).to be_allowed(:create_issue, :set_issue_metadata)
end
it 'allows non-members to read and create issues' do
expect(permissions(non_member, issue)).to be_allowed(:read_issue, :read_issue_iid)
expect(permissions(non_member, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
expect(permissions(non_member, new_issue)).to be_allowed(:create_issue)
end
it 'allows non-members to read issues' do
expect(permissions(non_member, issue)).to be_allowed(:read_issue, :read_issue_iid)
expect(permissions(non_member, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
end
it 'does not allow non-members to update, admin or set metadata' do
expect(permissions(non_member, issue)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(non_member, new_issue)).to be_disallowed(:set_issue_metadata)
end
it 'allows support_bot to read issues' do
# support_bot is still allowed read access in public projects through :public_access permission,
# see project_policy public_access rules policy (rule { can?(:public_access) }.policy {...})
expect(permissions(support_bot, issue)).to be_allowed(:read_issue, :read_issue_iid)
expect(permissions(support_bot, issue)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(support_bot, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
expect(permissions(support_bot, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(support_bot, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata)
end
it_behaves_like 'support bot with service desk enabled'
context 'when issues are private' do
before do
project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
end
let(:issue) { create(:issue, project: project, author: author) }
let(:visitor) { create(:user) }
let(:admin) { create(:user, :admin) }
@ -258,6 +316,15 @@ RSpec.describe IssuePolicy do
expect(permissions(admin, issue)).to be_disallowed(:create_note)
end
end
it 'does not allow non-members to update or create issues' do
expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata)
expect(permissions(non_member, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata)
end
it_behaves_like 'support bot with service desk disabled'
it_behaves_like 'support bot with service desk enabled'
end
context 'with confidential issues' do

View File

@ -41,6 +41,13 @@ RSpec.describe PersonalAccessTokenPolicy do
it { is_expected.to be_allowed(:read_token) }
it { is_expected.to be_allowed(:revoke_token) }
end
context 'subject of the impersonated token' do
let_it_be(:token) { build_stubbed(:personal_access_token, user: current_user, impersonation: true) }
it { is_expected.to be_disallowed(:read_token) }
it { is_expected.to be_disallowed(:revoke_token) }
end
end
context 'current_user is a blocked administrator', :enable_admin_mode do

View File

@ -480,8 +480,8 @@ RSpec.describe ProjectPolicy do
let(:current_user) { User.support_bot }
context 'with service desk disabled' do
it { expect_allowed(:guest_access) }
it { expect_disallowed(:create_note, :read_project) }
it { expect_allowed(:public_access) }
it { expect_disallowed(:guest_access, :create_note, :read_project) }
end
context 'with service desk enabled' do
@ -1131,12 +1131,20 @@ RSpec.describe ProjectPolicy do
let_it_be(:project_with_analytics_enabled) { create(:project, :analytics_enabled) }
before do
project_with_analytics_disabled.add_guest(guest)
project_with_analytics_private.add_guest(guest)
project_with_analytics_enabled.add_guest(guest)
project_with_analytics_disabled.add_reporter(reporter)
project_with_analytics_private.add_reporter(reporter)
project_with_analytics_enabled.add_reporter(reporter)
project_with_analytics_disabled.add_developer(developer)
project_with_analytics_private.add_developer(developer)
project_with_analytics_enabled.add_developer(developer)
end
context 'when analytics is enabled for the project' do
context 'when analytics is disabled for the project' do
let(:project) { project_with_analytics_disabled }
context 'for guest user' do
@ -1145,6 +1153,16 @@ RSpec.describe ProjectPolicy do
it { is_expected.to be_disallowed(:read_cycle_analytics) }
it { is_expected.to be_disallowed(:read_insights) }
it { is_expected.to be_disallowed(:read_repository_graphs) }
it { is_expected.to be_disallowed(:read_ci_cd_analytics) }
end
context 'for reporter user' do
let(:current_user) { reporter }
it { is_expected.to be_disallowed(:read_cycle_analytics) }
it { is_expected.to be_disallowed(:read_insights) }
it { is_expected.to be_disallowed(:read_repository_graphs) }
it { is_expected.to be_disallowed(:read_ci_cd_analytics) }
end
context 'for developer' do
@ -1153,6 +1171,7 @@ RSpec.describe ProjectPolicy do
it { is_expected.to be_disallowed(:read_cycle_analytics) }
it { is_expected.to be_disallowed(:read_insights) }
it { is_expected.to be_disallowed(:read_repository_graphs) }
it { is_expected.to be_disallowed(:read_ci_cd_analytics) }
end
end
@ -1162,9 +1181,19 @@ RSpec.describe ProjectPolicy do
context 'for guest user' do
let(:current_user) { guest }
it { is_expected.to be_disallowed(:read_cycle_analytics) }
it { is_expected.to be_disallowed(:read_insights) }
it { is_expected.to be_allowed(:read_cycle_analytics) }
it { is_expected.to be_allowed(:read_insights) }
it { is_expected.to be_disallowed(:read_repository_graphs) }
it { is_expected.to be_disallowed(:read_ci_cd_analytics) }
end
context 'for reporter user' do
let(:current_user) { reporter }
it { is_expected.to be_allowed(:read_cycle_analytics) }
it { is_expected.to be_allowed(:read_insights) }
it { is_expected.to be_allowed(:read_repository_graphs) }
it { is_expected.to be_allowed(:read_ci_cd_analytics) }
end
context 'for developer' do
@ -1173,18 +1202,29 @@ RSpec.describe ProjectPolicy do
it { is_expected.to be_allowed(:read_cycle_analytics) }
it { is_expected.to be_allowed(:read_insights) }
it { is_expected.to be_allowed(:read_repository_graphs) }
it { is_expected.to be_allowed(:read_ci_cd_analytics) }
end
end
context 'when analytics is enabled for the project' do
let(:project) { project_with_analytics_private }
let(:project) { project_with_analytics_enabled }
context 'for guest user' do
let(:current_user) { guest }
it { is_expected.to be_disallowed(:read_cycle_analytics) }
it { is_expected.to be_disallowed(:read_insights) }
it { is_expected.to be_allowed(:read_cycle_analytics) }
it { is_expected.to be_allowed(:read_insights) }
it { is_expected.to be_disallowed(:read_repository_graphs) }
it { is_expected.to be_disallowed(:read_ci_cd_analytics) }
end
context 'for reporter user' do
let(:current_user) { reporter }
it { is_expected.to be_allowed(:read_cycle_analytics) }
it { is_expected.to be_allowed(:read_insights) }
it { is_expected.to be_allowed(:read_repository_graphs) }
it { is_expected.to be_allowed(:read_ci_cd_analytics) }
end
context 'for developer' do
@ -1193,6 +1233,7 @@ RSpec.describe ProjectPolicy do
it { is_expected.to be_allowed(:read_cycle_analytics) }
it { is_expected.to be_allowed(:read_insights) }
it { is_expected.to be_allowed(:read_repository_graphs) }
it { is_expected.to be_allowed(:read_ci_cd_analytics) }
end
end
end

Some files were not shown because too many files have changed in this diff