Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-01-24 06:07:44 +00:00
parent 3f29b140ab
commit 4dcdd5bebb
22 changed files with 537 additions and 77 deletions

View File

@@ -134,7 +134,13 @@ module CommitsHelper
def conditionally_paginate_diff_files(diffs, paginate:, page:, per:)
if paginate
diff_files = diffs.diff_files.to_a
Gitlab::Utils::BatchLoader.clear_key([:repository_blobs, diffs.project.repository])
project = diffs.project
repo = project.repository
# While the increase_diff_file_performance feature flag exists, we clear every key variant
Gitlab::Utils::BatchLoader.clear_key([:repository_blobs, repo, Gitlab::Diff::FileCollection::MergeRequestDiffBase.max_blob_size(project)])
Gitlab::Utils::BatchLoader.clear_key([:repository_blobs, repo, Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE])
Gitlab::Utils::BatchLoader.clear_key([:repository_blobs, repo])
Kaminari.paginate_array(diff_files).page(page).per(per).tap do |diff_files|
diff_files.each(&:add_blobs_to_batch_loader)
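Because `batch-loader` groups pending items and caches results per `key`, blobs queued under a size-limited key and the plain key live in separate batches, so clearing only one key would leave the others stale. A minimal standalone sketch of that behaviour, assuming only the `batch-loader` gem and illustrative keys (not GitLab's `Gitlab::Utils::BatchLoader` wrapper):

```ruby
require 'batch_loader'

# Each distinct `key` gets its own batch and its own cache, which is why the
# helper above clears every key variant while the feature flag is rolled out.
def lazy_value(id, key)
  BatchLoader.for(id).batch(key: key) do |ids, loader, _args|
    ids.each { |i| loader.call(i, "loaded #{i} under #{key.inspect}") }
  end
end

with_limit    = lazy_value(1, [:repository_blobs, :repo, 10])
without_limit = lazy_value(1, [:repository_blobs, :repo])

puts with_limit.upcase    # resolves the batch keyed with a blob size limit
puts without_limit.upcase # resolves a separate batch keyed without a limit
```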

View File

@@ -94,8 +94,11 @@ class Blob < SimpleDelegator
end
def self.lazy(repository, commit_id, path, blob_size_limit: Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE)
BatchLoader.for([commit_id, path]).batch(key: [:repository_blobs, repository]) do |items, loader, args|
args[:key].last.blobs_at(items, blob_size_limit: blob_size_limit).each do |blob|
key = [:repository_blobs, repository]
key << blob_size_limit if Feature.enabled?(:increase_diff_file_performance, repository.project)
BatchLoader.for([commit_id, path]).batch(key: key) do |items, loader, args|
args[:key].second.blobs_at(items, blob_size_limit: blob_size_limit).each do |blob|
loader.call([blob.commit_id, blob.path], blob) if blob
end
end

View File

@@ -31,6 +31,16 @@ module Issues
Issues::RebalancingWorker.perform_async(nil, *issue.project.self_or_root_group_ids)
end
def execute_hooks(issue, action = 'open', old_associations: {})
issue_data = Gitlab::Lazy.new { hook_data(issue, action, old_associations: old_associations) }
hooks_scope = issue.confidential? ? :confidential_issue_hooks : :issue_hooks
issue.namespace.execute_hooks(issue_data, hooks_scope)
issue.namespace.execute_integrations(issue_data, hooks_scope)
execute_incident_hooks(issue, issue_data) if issue.work_item_type&.incident?
execute_group_mention_hooks(issue, issue_data) if action == 'open'
end
private
# overriding this because IssuableBaseService#constructor_container_arg returns { project: value }
@@ -105,16 +115,6 @@ module Issues
issue, issue.project, current_user, old_assignees)
end
def execute_hooks(issue, action = 'open', old_associations: {})
issue_data = Gitlab::Lazy.new { hook_data(issue, action, old_associations: old_associations) }
hooks_scope = issue.confidential? ? :confidential_issue_hooks : :issue_hooks
issue.namespace.execute_hooks(issue_data, hooks_scope)
issue.namespace.execute_integrations(issue_data, hooks_scope)
execute_incident_hooks(issue, issue_data) if issue.work_item_type&.incident?
execute_group_mention_hooks(issue, issue_data) if action == 'open'
end
# We can remove this code after proposal in
# https://gitlab.com/gitlab-org/gitlab/-/issues/367550#proposal is updated.
def execute_incident_hooks(issue, issue_data)

View File

@@ -37,12 +37,33 @@ module Timelogs
note: nil
)
old_associations = { total_time_spent: issuable.total_time_spent }
if !timelog.save
error_in_save(timelog)
else
SystemNoteService.created_timelog(issuable, issuable.project, current_user, timelog)
issuable_base_service.execute_hooks(issuable, 'update', old_associations: old_associations)
success(timelog)
end
end
private
def issuable_base_service
if issuable.is_a?(Issue)
Issues::BaseService.new(
container: issuable.project,
current_user: current_user
)
else
MergeRequests::BaseService.new(
project: issuable.project,
current_user: current_user
)
end
end
end
end

View File

@@ -0,0 +1,14 @@
# frozen_string_literal: true
module WorkItems
module Callbacks
class Notes < Base
def before_update
return unless params.present? && params.key?(:discussion_locked)
return unless has_permission?(:set_work_item_metadata)
work_item.discussion_locked = params[:discussion_locked]
end
end
end
end

View File

@@ -0,0 +1,8 @@
---
name: increase_diff_file_performance
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/136674
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/432600
milestone: '16.9'
type: development
group: group::code review
default_enabled: false

View File

@@ -27,6 +27,10 @@ but have several advantages:
Instead of creating your own components, you can also search for published components
that have the functionality you need in the [CI/CD Catalog](#cicd-catalog).
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
For an introduction and hands-on examples, see [Efficient DevSecOps workflows with reusable CI/CD components](https://www.youtube.com/watch?v=-yvfSFKAgbA).
<!-- Video published on 2024-01-22. DRI: Developer Relations, https://gitlab.com/groups/gitlab-com/marketing/developer-relations/-/epics/399 -->
## Component project
A component project is a GitLab project with a repository that hosts one or more components.
@@ -301,7 +305,7 @@ For example:
```yaml
include:
# include the component located in the current project from the current SHA
- component: gitlab.com/$CI_PROJECT_PATH/my-component@$CI_COMMIT_SHA
- component: $CI_SERVER_HOST/$CI_PROJECT_PATH/my-component@$CI_COMMIT_SHA
inputs:
stage: build
@@ -315,7 +319,7 @@ ensure-job-added:
image: badouralix/curl-jq
script:
- |
route="https://gitlab.com/api/v4/projects/$CI_PROJECT_ID/pipelines/$CI_PIPELINE_ID/jobs"
route="${CI_API_V4_URL}/projects/$CI_PROJECT_ID/pipelines/$CI_PIPELINE_ID/jobs"
count=`curl --silent --header "PRIVATE-TOKEN: $API_TOKEN" $route | jq 'map(select(.name | contains("component-job"))) | length'`
if [ "$count" != "1" ]; then
exit 1

View File

@@ -12,7 +12,7 @@ NOTE:
Use [this snippet](https://gitlab.com/gitlab-org/gitlab/-/snippets/2554994) for help automating the following section.
1. [Enable Anthropic API features](index.md#configure-anthropic-access).
1. [Ensure the embedding database is configured](index.md#set-up-the-embedding-database).
1. [Ensure the embedding database is configured](index.md#embeddings-database).
1. Ensure that your current branch is up-to-date with `master`.
1. Enable the feature in Rails console: `Feature.enable(:tanuki_bot_breadcrumbs_entry_point)`

View File

@@ -39,6 +39,15 @@ to AI that you think could benefit from being in this list, add it!
piece of information, which helps to clarify its meaning and implications.
For GitLab Duo Chat, context is the attributes of the Issue or Epic being
referenced in a user question.
- **Embeddings**: In the context of machine learning and large language models,
embeddings refer to a technique used to represent words, phrases, or even
entire documents as dense numerical vectors in a continuous vector space.
At GitLab, [we use Vertex AI's Embeddings API](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/129930)
to create a vector representation of GitLab documentation. These
embeddings are stored in the `vertex_gitlab_docs` database table in the
`embeddings` database. The embeddings search is done in Postgres using the
`vector` extension (a query sketch follows this glossary excerpt). The Vertex embeddings database is updated based on the
latest version of GitLab documentation on a daily basis by running `Llm::Embedding::GitlabDocumentation::CreateEmptyEmbeddingsRecordsWorker` as a cron job.
- **Golden Questions**: a small subset of the types of questions we think a user
should be able to ask GitLab Duo Chat. Used to generate data for Chat evaluation.
[Questions for Chat Beta](https://gitlab.com/groups/gitlab-org/-/epics/10550#what-the-user-can-ask).
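The embeddings entry above notes that the search runs in Postgres through the `vector` extension. As a hedged sketch (the `embedding` column name and the selected fields are assumptions, not GitLab's actual query code), a nearest-neighbour lookup against `vertex_gitlab_docs` could look like this:

```ruby
# Illustrative only: assumes `vertex_gitlab_docs` has an `embedding` vector
# column and that `question_embedding` is the Vertex AI embedding of the
# user's question. pgvector's `<=>` operator computes cosine distance.
question_embedding = [0.017, -0.094, 0.122] # truncated; Vertex text embeddings are typically 768-dimensional
nearest_docs_sql = <<~SQL
  SELECT *
  FROM vertex_gitlab_docs
  ORDER BY embedding <=> '[#{question_embedding.join(',')}]'::vector
  LIMIT 5
SQL
# Run against the `embeddings` database, this returns the documentation chunks
# closest to the question, which are then supplied to the model as context.
```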

View File

@@ -77,20 +77,6 @@ RAILS_ENV=development bundle exec rake gitlab:duo:setup['<test-group-name>']
1. For Vertex, follow the [instructions below](#configure-gcp-vertex-access).
1. For Anthropic, create an access request
### Set up the embedding database
For features that use the embedding database, additional setup is needed.
1. Enable [`pgvector`](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/main/doc/howto/pgvector.md#enable-pgvector-in-the-gdk) in GDK
1. Enable the embedding database in GDK
```shell
gdk config set gitlab.rails.databases.embedding.enabled true
```
1. Run `gdk reconfigure`
1. Run database migrations to create the embedding database
### Configure GCP Vertex access
In order to obtain a GCP service key for local development, follow the steps below:
@@ -118,38 +104,71 @@ Gitlab::CurrentSettings.update(vertex_ai_project: PROJECT_ID)
Gitlab::CurrentSettings.update!(anthropic_api_key: <insert API key>)
```
### Populating embeddings and using embeddings fixture
### Embeddings database
Embeddings are generated through VertexAI text embeddings endpoint. The sections below explain how to populate
embeddings in the DB or extract embeddings to be used in specs.
Embeddings are generated through the [VertexAI text embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings). The sections
below explain how to populate embeddings in the DB or extract embeddings to be
used in specs.
#### VertexAI embeddings
#### Set up
To seed your development database with the embeddings for GitLab Documentation,
you may use the pre-generated embeddings and a Rake task.
1. Enable [`pgvector`](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/main/doc/howto/pgvector.md#enable-pgvector-in-the-gdk) in GDK
1. Enable the embedding database in GDK
```shell
gdk config set gitlab.rails.databases.embedding.enabled true
```
1. Run `gdk reconfigure`
1. Run database migrations to create the embedding database
```shell
RAILS_ENV=development bin/rails db:migrate
```
#### Populate
Seed your development database with the embeddings for GitLab Documentation
using this Rake task:
```shell
RAILS_ENV=development bundle exec rake gitlab:llm:embeddings:vertex:seed
```
The DBCleaner gem we use clears the database tables before each test runs.
Instead of fully populating the `vertex_gitlab_docs` table, where we store VertexAI embeddings for the documentation,
we can add a few selected embeddings to the table from a pre-generated fixture.
This Rake Task populates the embeddings database with a vectorized
representation of all GitLab Documentation. The file the Rake Task uses as a
source is a snapshot of GitLab Documentation at some point in the past and is
not updated regularly. As a result, it is helpful to know that this seed task
creates embeddings based on GitLab Documentation that is out of date. Slightly
outdated documentation embeddings are sufficient for the development
environment, which is the use-case for the seed task.
For instance, to test that the question "How can I reset my password" correctly
retrieves the relevant embeddings and is answered, we can extract the top N closest embeddings
for the question into a fixture and restore only that small number of embeddings quickly.
To facilitate this extraction process, a Rake task has been written.
You can add or remove the questions to be tested in the Rake task and run the task to generate a new fixture.
When writing or updating tests related to embeddings, you may want to update the
embeddings fixture file:
```shell
RAILS_ENV=development bundle exec rake gitlab:llm:embeddings:vertex:extract_embeddings
```
#### Using embeddings in specs
#### Use embeddings in specs
The `seed` Rake Task populates the development database with embeddings for all GitLab
Documentation. The `extract_embeddings` Rake Task populates a fixture file with a subset
of embeddings.
The set of questions listed in the Rake Task itself determines
which embeddings are pulled into the fixture file. For example, one of the
questions is "How can I reset my password?" The `extract_embeddings` Task
pulls the most relevant embeddings for this question from the development
database (which has data from the `seed` Rake Task) and saves those embeddings
in `ee/spec/fixtures/vertex_embeddings`. This fixture is used in tests related
to embeddings.
If you would like to change any of the questions supported in embeddings specs,
update and re-run the `extract_embeddings` Rake Task.
In the specs where you need to use the embeddings,
use the RSpec config hook `:ai_embedding_fixtures` on a context.
use the RSpec `:ai_embedding_fixtures` metadata.
```ruby
context 'when asking about how to use GitLab', :ai_embedding_fixtures do

View File

@@ -32,6 +32,10 @@ For server-side controls and enforcement of best practices, see
You can use push options to skip a CI/CD pipeline, or pass CI/CD variables.
NOTE:
Push options are not available for merge request pipelines. For more information,
see [issue 373212](https://gitlab.com/gitlab-org/gitlab/-/issues/373212).
| Push option | Description | Example |
|--------------------------------|-------------|---------|
| `ci.skip` | Do not create a CI/CD pipeline for the latest push. Skips only branch pipelines and not [merge request pipelines](../../ci/pipelines/merge_request_pipelines.md). This does not skip pipelines for CI/CD integrations, such as Jenkins. | `git push -o ci.skip` |

View File

@@ -5,7 +5,7 @@ module Gitlab
class File
include Gitlab::Utils::StrongMemoize
attr_reader :diff, :repository, :diff_refs, :fallback_diff_refs, :unique_identifier
attr_reader :diff, :repository, :diff_refs, :fallback_diff_refs, :unique_identifier, :max_blob_size
delegate :new_file?, :deleted_file?, :renamed_file?, :unidiff,
:old_path, :new_path, :a_mode, :b_mode, :mode_changed?,
@@ -31,7 +31,8 @@ module Gitlab
diff_refs: nil,
fallback_diff_refs: nil,
stats: nil,
unique_identifier: nil)
unique_identifier: nil,
max_blob_size: nil)
@diff = diff
@stats = stats
@@ -39,6 +40,7 @@ module Gitlab
@diff_refs = diff_refs
@fallback_diff_refs = fallback_diff_refs
@unique_identifier = unique_identifier
@max_blob_size = max_blob_size
@unfolded = false
# Ensure items are collected in the batch
@@ -397,8 +399,12 @@ module Gitlab
def fetch_blob(sha, path)
return unless sha
if max_blob_size.present?
Blob.lazy(repository, sha, path, blob_size_limit: max_blob_size)
else
Blob.lazy(repository, sha, path)
end
end
def total_blob_lines(blob)
@total_lines ||= begin

View File

@@ -119,7 +119,8 @@ module Gitlab
repository: project.repository,
diff_refs: diff_refs,
fallback_diff_refs: fallback_diff_refs,
stats: stats)
stats: stats,
max_blob_size: self.class.max_blob_size(project))
if @use_extra_viewer_as_main && diff_file.has_renderable?
diff_file.rendered
@@ -131,6 +132,8 @@ module Gitlab
def sort_diffs(diffs)
Gitlab::Diff::FileCollectionSorter.new(diffs).sort
end
def self.max_blob_size(_) = nil
end
end
end

View File

@@ -49,6 +49,14 @@ module Gitlab
diff_stats_cache.clear
end
override :max_blob_size
def self.max_blob_size(project)
return unless Feature.enabled?(:increase_diff_file_performance, project)
[Gitlab::Git::Diff.patch_hard_limit_bytes,
Gitlab.config.extra['maximum_text_highlight_size_kilobytes']].max
end
private
def highlight_cache

View File

@@ -24,4 +24,14 @@ RSpec.describe Gitlab::Diff::FileCollection::Base do
end
end
end
describe '#raw_diff_files' do
let(:max_blob_size) { 10 }
it 'returns diffs that contain a maximum of max_blob_size of data' do
allow(described_class).to receive(:max_blob_size).and_return(max_blob_size)
expect(described_class.new(diffable, project: merge_request.project).raw_diff_files.all? { |file| file.max_blob_size == max_blob_size }).to be_truthy
end
end
end

View File

@@ -44,4 +44,43 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBase do
expect(cache_key).to eq diffable.cache_key
end
end
describe '.max_blob_size' do
let(:project) { merge_request.project }
context 'when increase_diff_file_performance is enabled' do
before do
allow(Gitlab::Git::Diff).to receive(:patch_hard_limit_bytes).and_return(max_diff)
stub_config(extra: { 'maximum_text_highlight_size_kilobytes' => max_config })
end
context 'when Gitlab::Git::Diff.patch_hard_limit_bytes is larger' do
let(:max_diff) { 10 }
let(:max_config) { 1 }
it 'returns the Gitlab::Git::Diff.patch_hard_limit_bytes setting' do
expect(described_class.max_blob_size(project)).to eq(max_diff)
end
end
context 'when maximum_text_highlight_size_kilobytes setting is larger' do
let(:max_diff) { 10 }
let(:max_config) { 100 }
it 'returns the maximum_text_highlight_size_kilobytes setting' do
expect(described_class.max_blob_size(project)).to eq(max_config)
end
end
end
context 'when increase_diff_file_performance is disabled' do
before do
stub_feature_flags(increase_diff_file_performance: false)
end
it 'returns nil' do
expect(described_class.max_blob_size(project)).to eq(nil)
end
end
end
end

View File

@@ -8,7 +8,9 @@ RSpec.describe Gitlab::Diff::File do
let_it_be(:project) { create(:project, :repository) }
let(:commit) { project.commit(sample_commit.id) }
let(:diff) { commit.raw_diffs.first }
let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) }
let(:diff_file) do
described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository)
end
def create_file(file_name, content)
Files::CreateService.new(
@@ -285,12 +287,41 @@ RSpec.describe Gitlab::Diff::File do
end
describe '#old_blob and #new_blob' do
context 'when increase_diff_file_performance is on' do
let(:diff_file) do
described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository, max_blob_size: max_blob_size)
end
let(:max_blob_size) { 1000 }
before do
stub_feature_flags(increase_diff_file_performance: true)
end
context 'when the blobs are truncated' do
let(:max_blob_size) { 10 }
it 'returns the truncated blobs' do
items = [
[diff_file.new_content_sha, diff_file.new_path], [diff_file.old_content_sha, diff_file.old_path]
]
expect(project.repository).to receive(:blobs_at).with(items, blob_size_limit: max_blob_size).and_call_original
old_data = diff_file.old_blob.data
data = diff_file.new_blob.data
expect(old_data.size).to eq(max_blob_size)
expect(data.size).to eq(max_blob_size)
end
end
it 'returns blob of base commit and the new commit' do
items = [
[diff_file.new_content_sha, diff_file.new_path], [diff_file.old_content_sha, diff_file.old_path]
]
expect(project.repository).to receive(:blobs_at).with(items, blob_size_limit: 10.megabytes).and_call_original
expect(project.repository).to receive(:blobs_at).with(items, blob_size_limit: max_blob_size).and_call_original
old_data = diff_file.old_blob.data
data = diff_file.new_blob.data
@@ -300,6 +331,31 @@ RSpec.describe Gitlab::Diff::File do
end
end
context 'when increase_diff_file_performance is off' do
let(:diff_file) do
described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository)
end
before do
stub_feature_flags(increase_diff_file_performance: false)
end
it 'returns blob of base commit and the new commit' do
items = [
[diff_file.new_content_sha, diff_file.new_path], [diff_file.old_content_sha, diff_file.old_path]
]
expect(project.repository).to receive(:blobs_at).with(items, blob_size_limit: Gitlab::Git::Blob::MAX_DATA_DISPLAY_SIZE).and_call_original
old_data = diff_file.old_blob.data
data = diff_file.new_blob.data
expect(old_data).to include('raise "System commands must be given as an array of strings"')
expect(data).to include('raise RuntimeError, "System commands must be given as an array of strings"')
end
end
end
describe '#diffable?' do
context 'when attributes exist' do
let(:commit) { project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863') }

View File

@@ -68,31 +68,89 @@ RSpec.describe Blob do
end
end
context 'when increase_diff_file_performance is turned off' do
before do
stub_feature_flags(increase_diff_file_performance: false)
end
context 'with project' do
let(:container) { create(:project, :repository) }
let(:same_container) { Project.find(container.id) }
let(:other_container) { create(:project, :repository) }
let_it_be(:container) { create(:project, :repository) }
let_it_be(:same_container) { Project.find(container.id) }
let_it_be(:other_container) { create(:project, :repository) }
it_behaves_like '.lazy checks'
end
context 'with personal snippet' do
let(:container) { create(:personal_snippet, :repository) }
let(:same_container) { PersonalSnippet.find(container.id) }
let(:other_container) { create(:personal_snippet, :repository) }
let_it_be(:container) { create(:personal_snippet, :repository) }
let_it_be(:same_container) { PersonalSnippet.find(container.id) }
let_it_be(:other_container) { create(:personal_snippet, :repository) }
it_behaves_like '.lazy checks'
end
context 'with project snippet' do
let(:container) { create(:project_snippet, :repository) }
let(:same_container) { ProjectSnippet.find(container.id) }
let(:other_container) { create(:project_snippet, :repository) }
let_it_be(:container) { create(:project_snippet, :repository) }
let_it_be(:same_container) { ProjectSnippet.find(container.id) }
let_it_be(:other_container) { create(:project_snippet, :repository) }
it_behaves_like '.lazy checks'
end
end
context 'when increase_diff_file_performance is turned on' do
context 'with project' do
let_it_be(:container) { create(:project, :repository) }
let_it_be(:same_container) { Project.find(container.id) }
let_it_be(:other_container) { create(:project, :repository) }
it_behaves_like '.lazy checks'
context 'when the blob size limit is different' do
it 'fetches all blobs for the same repository and same blob size limit when one is accessed' do
expect(container.repository).to receive(:blobs_at)
.with([[commit_id, 'CHANGELOG']], blob_size_limit: 10)
.once.and_call_original
expect(same_container.repository).to receive(:blobs_at)
.with([[commit_id, 'CONTRIBUTING.md'], [commit_id, 'README.md']], blob_size_limit: 20)
.once.and_call_original
expect(other_container.repository).not_to receive(:blobs_at)
changelog = described_class.lazy(container.repository, commit_id, 'CHANGELOG', blob_size_limit: 10)
contributing = described_class.lazy(same_container.repository, commit_id, 'CONTRIBUTING.md',
blob_size_limit: 20)
described_class.lazy(same_container.repository, commit_id, 'README.md',
blob_size_limit: 20)
described_class.lazy(other_container.repository, commit_id, 'CHANGELOG', blob_size_limit: 30)
# Access property so the values are loaded
changelog.id
contributing.id
end
end
end
context 'with personal snippet' do
let_it_be(:container) { create(:personal_snippet, :repository) }
let_it_be(:same_container) { PersonalSnippet.find(container.id) }
let_it_be(:other_container) { create(:personal_snippet, :repository) }
it_behaves_like '.lazy checks'
end
context 'with project snippet' do
let_it_be(:container) { create(:project_snippet, :repository) }
let_it_be(:same_container) { ProjectSnippet.find(container.id) }
let_it_be(:other_container) { create(:project_snippet, :repository) }
it_behaves_like '.lazy checks'
end
end
end
describe '#data' do
shared_examples '#data checks' do
context 'using a binary blob' do

View File

@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe WorkItems::Widgets::Notes, feature_category: :team_planning do
let_it_be(:work_item) { create(:work_item) }
let_it_be(:work_item) { create(:work_item, :objective, discussion_locked: true) }
let_it_be(:note) { create(:note, noteable: work_item, project: work_item.project) }
describe '.type' do
@@ -17,4 +17,22 @@ RSpec.describe WorkItems::Widgets::Notes, feature_category: :team_planning do
describe '#notes' do
it { expect(described_class.new(work_item).notes).to eq(work_item.notes) }
end
describe '.quick_action_params' do
subject { described_class.quick_action_params }
it { is_expected.to include(:discussion_locked) }
end
describe '.quick_action_commands' do
subject { described_class.quick_action_commands }
it { is_expected.to match_array([:lock, :unlock]) }
end
describe '#discussion_locked' do
subject { described_class.new(work_item).discussion_locked }
it { is_expected.to eq(work_item.discussion_locked) }
end
end

View File

@@ -1479,6 +1479,109 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
end
end
context 'with notes widget input' do
let(:discussion_locked) { true }
let(:input) { { 'notesWidget' => { 'discussionLocked' => true } } }
let(:fields) do
<<~FIELDS
workItem {
widgets {
type
... on WorkItemWidgetNotes {
discussionLocked
}
}
}
errors
FIELDS
end
shared_examples 'work item is not updated' do
it 'ignores the update' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
end.not_to change(&work_item_change)
end
end
it_behaves_like 'work item is not updated' do
let(:current_user) { guest }
let(:work_item_change) { -> { work_item.discussion_locked } }
end
context 'when user has permissions to update the work item' do
let(:current_user) { reporter }
it 'updates work item discussion locked attribute on notes widget' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
end.to change { work_item.discussion_locked }.from(nil).to(true)
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['workItem']['widgets']).to include(
{
'discussionLocked' => true,
'type' => 'NOTES'
}
)
end
context 'when using quick action' do
let(:input) { { 'descriptionWidget' => { 'description' => "/lock" } } }
it 'updates work item discussion locked attribute on notes widget' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
end.to change { work_item.discussion_locked }.from(nil).to(true)
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['workItem']['widgets']).to include(
{
'discussionLocked' => true,
'type' => 'NOTES'
}
)
end
context 'when unlocking discussion' do
let(:input) { { 'descriptionWidget' => { 'description' => "/unlock" } } }
before do
work_item.update!(discussion_locked: true)
end
it 'updates work item discussion locked attribute on notes widget' do
expect do
post_graphql_mutation(mutation, current_user: current_user)
work_item.reload
end.to change { work_item.discussion_locked }.from(true).to(false)
expect(response).to have_gitlab_http_status(:success)
end
end
context 'when the work item type does not support the notes widget' do
let(:input) do
{ 'descriptionWidget' => { 'description' => "Updating notes discussion locked.\n/lock" } }
end
before do
WorkItems::Type.default_by_type(:issue).widget_definitions
.find_by_widget_type(:notes).update!(disabled: true)
end
it_behaves_like 'work item is not updated' do
let(:work_item_change) { -> { work_item.discussion_locked } }
end
end
end
end
end
context 'when unsupported widget input is sent' do
let_it_be(:work_item) { create(:work_item, :test_case, project: project) }

View File

@@ -0,0 +1,66 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe WorkItems::Callbacks::Notes, feature_category: :team_planning do
let_it_be(:guest) { create(:user) }
let_it_be(:reporter) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be_with_reload(:work_item) do
create(:work_item, project: project, author: guest, discussion_locked: nil)
end
let(:current_user) { guest }
let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Notes) } }
let(:discussion_locked) { true }
let(:params) { { discussion_locked: discussion_locked } }
let(:service) { described_class.new(issuable: work_item, current_user: current_user, params: params) }
before_all do
project.add_guest(guest)
project.add_reporter(reporter)
end
subject(:update_discussion_locked) { service.before_update }
describe '#before_update_callback' do
shared_examples 'discussion_locked is unchanged' do
it 'does not change the discussion_locked of the work item' do
expect { update_discussion_locked }.to not_change { work_item.discussion_locked }
end
end
context 'when discussion_locked param is not present' do
let(:params) { {} }
it_behaves_like 'discussion_locked is unchanged'
end
context 'when user cannot set work item metadata' do
let(:current_user) { guest }
it_behaves_like 'discussion_locked is unchanged'
end
context 'when user can set work item metadata' do
let(:current_user) { reporter }
it 'sets the discussion_locked for the work item' do
expect { update_discussion_locked }.to change { work_item.discussion_locked }.from(nil).to(true)
end
context 'when widget does not exist in new type' do
let(:params) { {} }
before do
allow(service).to receive(:new_type_excludes_widget?).and_return(true)
work_item.discussion_locked = true
end
it "keeps item's discussion_locked value intact" do
expect { update_discussion_locked }.not_to change { work_item.discussion_locked }
end
end
end
end
end

View File

@@ -9,6 +9,11 @@ RSpec.shared_examples 'issuable supports timelog creation service' do
shared_examples 'success_response' do
it 'successfully saves the timelog' do
expect(Projects::TriggeredHooks).to receive(:new).with(
issuable.is_a?(Issue) ? :issue_hooks : :merge_request_hooks,
a_hash_including(changes: a_hash_including(total_time_spent: { previous: 0, current: time_spent }))
).and_call_original
is_expected.to be_success
timelog = subject.payload[:timelog]