Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-05-16 12:18:27 +00:00
parent f7b92ec732
commit f6b1c1cf73
76 changed files with 2329 additions and 709 deletions

View File

@ -15,9 +15,11 @@ import { getIdFromGraphQLId, convertToGraphQLId } from '~/graphql_shared/utils';
import { TYPENAME_USER } from '~/graphql_shared/constants';
import { __ } from '~/locale';
import { setUrlFragment } from '~/lib/utils/url_utility';
import ImportedBadge from '~/vue_shared/components/imported_badge.vue';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import EmojiPicker from '~/emoji/components/picker.vue';
import { TYPE_COMMENT } from '~/import/constants';
import getDesignQuery from '../../graphql/queries/get_design.query.graphql';
import updateNoteMutation from '../../graphql/mutations/update_note.mutation.graphql';
import designNoteAwardEmojiToggleMutation from '../../graphql/mutations/design_note_award_emoji_toggle.mutation.graphql';
@ -42,6 +44,7 @@ export default {
GlButton,
GlDisclosureDropdown,
GlLink,
ImportedBadge,
TimeAgoTooltip,
TimelineEntryItem,
},
@ -124,6 +127,9 @@ export default {
isEditingAndHasPermissions() {
return !this.isEditing && this.adminPermissions;
},
isImported() {
return this.note.imported;
},
adminPermissions() {
return this.note.userPermissions.adminNote;
},
@ -261,6 +267,7 @@ export default {
},
},
updateNoteMutation,
TYPE_COMMENT,
};
</script>
@ -294,11 +301,12 @@ export default {
<span class="note-headline-light note-headline-meta">
<span class="system-note-message"> <slot></slot> </span>
<gl-link
class="note-timestamp system-note-separator gl-display-block gl-mb-2 gl-font-sm link-inherit-color"
class="note-timestamp system-note-separator gl-display-inline-block gl-mb-2 gl-font-sm link-inherit-color"
:href="`#note_${noteAnchorId}`"
>
<time-ago-tooltip :time="note.createdAt" tooltip-placement="bottom" />
</gl-link>
<imported-badge v-if="isImported" :importable-type="$options.TYPE_COMMENT" size="sm" />
</span>
</div>
<div class="gl-display-flex gl-align-items-flex-start gl-mt-n2 gl-mr-n2">

View File

@ -11,6 +11,7 @@ fragment DesignNote on Note {
bodyHtml
createdAt
resolved
imported
awardEmoji {
nodes {
name

View File

@ -3,7 +3,7 @@
}
.top-bar-fixed {
@include gl-inset-border-b-1-gray-100;
@apply gl-shadow-inner-b-1-gray-100;
background-color: $body-bg;
position: fixed;
left: var(--application-bar-left);

View File

@ -0,0 +1,32 @@
# frozen_string_literal: true

module Mutations
  module Ml
    module Models
      # GraphQL mutation (graphql_name: MlModelDelete) that removes an
      # ML model from a project via Ml::DestroyModelService.
      class Delete < Base
        graphql_name 'MlModelDelete'

        include FindsProject

        argument :id, ::Types::GlobalIDType[::Ml::Model],
          required: true,
          description: 'Global ID of the model to be deleted.'

        def resolve(**args)
          # Authorization is performed against the project, not the model.
          project = authorized_find!(args[:project_path])
          model = ::Ml::Model.by_project_id_and_id(project.id, args[:id].model_id)

          return { errors: ['Model not found'] } unless model

          destroy_result = ::Ml::DestroyModelService.new(model, current_user).execute

          {
            model: destroy_result.payload[:model],
            errors: destroy_result.errors
          }
        end
      end
    end
  end
end

View File

@ -210,6 +210,7 @@ module Types
mount_mutation Mutations::Admin::AbuseReportLabels::Create, alpha: { milestone: '16.4' }
mount_mutation Mutations::Ml::Models::Create, alpha: { milestone: '16.8' }
mount_mutation Mutations::Ml::Models::Destroy, alpha: { milestone: '16.10' }
mount_mutation Mutations::Ml::Models::Delete, alpha: { milestone: '17.0' }
mount_mutation Mutations::Ml::ModelVersions::Delete, alpha: { milestone: '17.0' }
mount_mutation Mutations::BranchRules::Delete, alpha: { milestone: '16.9' }
end

View File

@ -49,6 +49,10 @@ module Types
null: true,
description: 'List of emoji reactions associated with the note.'
field :imported, GraphQL::Types::Boolean,
null: true,
description: 'Indicates whether the note was imported.',
method: :imported?
field :internal, GraphQL::Types::Boolean,
null: true,
description: 'Indicates if the note is internal.',

View File

@ -143,6 +143,13 @@ module Ci
scope :eager_load_job_artifacts, -> { includes(:job_artifacts) }
scope :eager_load_tags, -> { includes(:tags) }
scope :eager_load_for_archiving_trace, -> { preload(:project, :pending_state) }
scope :eager_load_for_api, -> do
preload(
:job_artifacts_archive, :job_artifacts, :runner, :tags, :runner_manager, :metadata,
pipeline: :project,
user: [:user_preference, :user_detail, :followees]
)
end
scope :eager_load_everything, -> do
includes(

View File

@ -578,7 +578,7 @@ class Integration < ApplicationRecord
fields.reject { _1[:type] == :password || _1[:name] == 'webhook' || (_1.key?(:if) && _1[:if] != true) }.pluck(:name)
end
def self.api_fields
def self.api_arguments
fields.filter_map do |field|
next if field.if != true

View File

@ -16,7 +16,7 @@ module Ci
end
end
condition(:unprotected_ref) do
condition(:unprotected_ref, scope: :subject) do
if @subject.tag?
!ProtectedTag.protected?(@subject.project, @subject.ref)
else
@ -56,11 +56,11 @@ module Ci
@subject.debug_mode?
end
condition(:can_read_project_build, scope: :subject) do
condition(:can_read_project_build) do
can?(:read_build, @subject.project)
end
condition(:project_update_build, scope: :subject) do
condition(:project_update_build) do
can?(:update_build, @subject.project)
end

View File

@ -7,7 +7,7 @@ module Ci
included do
prepend_mod_with('Ci::DeployablePolicy') # rubocop: disable Cop/InjectEnterpriseEditionModule
condition(:has_outdated_deployment) do
condition(:has_outdated_deployment, scope: :subject) do
@subject.has_outdated_deployment?
end

View File

@ -12,7 +12,7 @@ module Ci
@subject.none_access?
end
condition(:can_read_project_build, scope: :subject) do
condition(:can_read_project_build) do
can?(:read_build, @subject.job.project)
end

View File

@ -8,14 +8,14 @@ module Ml
end
def execute
return error unless @model.destroy
package_deletion_result = ::Packages::MarkPackagesForDestructionService.new(
packages: @model.all_packages,
current_user: @user
).execute
return packages_not_deleted if package_deletion_result.error?
return packages_not_deleted(package_deletion_result.message) if package_deletion_result.error?
return error unless @model.destroy
success
end
@ -23,15 +23,19 @@ module Ml
private
def success
ServiceResponse.success(message: _('Model was successfully deleted'))
ServiceResponse.success(payload: payload)
end
def error
ServiceResponse.error(message: _('Failed to delete model'))
ServiceResponse.error(message: @model.errors.full_messages, payload: payload)
end
def packages_not_deleted
ServiceResponse.success(message: _('Model deleted but failed to remove associated packages'))
def packages_not_deleted(error_message)
ServiceResponse.error(message: error_message, payload: payload)
end
def payload
{ model: @model }
end
end
end

View File

@ -45,7 +45,7 @@
= tab_link_for @merge_request, :diffs do
= _("Changes")
= gl_badge_tag tab_count_display(@merge_request, @diffs_count), { size: :sm, class: 'js-changes-tab-count', data: { gid: @merge_request.to_gid.to_s } }
.d-flex.gl-flex-wrap.gl-align-items-center.justify-content-lg-end
.gl-flex.gl-flex-wrap.gl-items-center.justify-content-lg-end
#js-vue-discussion-counter{ data: { blocks_merge: @project.only_allow_merge_if_all_discussions_are_resolved?.to_s } }
- if !!@issuable_sidebar.dig(:current_user, :id)
.gl-display-flex.gl-gap-3
@ -53,7 +53,7 @@
- if notifications_todos_buttons_enabled?
.js-sidebar-subscriptions-widget-root{ data: { full_path: @issuable_sidebar[:project_full_path], iid: @issuable_sidebar[:iid] } }
.gl-ml-auto.gl-align-items-center.gl-display-none.gl-md-display-flex.gl-ml-3.js-expand-sidebar.gl-absolute.gl-right-5{ class: "gl-lg-display-none!" }
.gl-ml-auto.gl-items-center.gl-hidden.sm:gl-flex.lg:gl-hidden.gl-ml-3.js-expand-sidebar.gl-absolute.gl-right-5
= render Pajamas::ButtonComponent.new(icon: 'chevron-double-lg-left',
button_options: { class: 'js-sidebar-toggle' }) do
= _('Expand')

View File

@ -27,6 +27,21 @@
"project.id",
"namespace.id"
]
},
"filter": {
"type": "object",
"properties": {
"label": {
"type": "string"
},
"property": {
"type": "string"
},
"value": {
"type": "number"
}
},
"additionalProperties": false
}
},
"additionalProperties": false

View File

@ -581,6 +581,8 @@
- 3
- - product_analytics_initialize_snowplow_product_analytics
- 1
- - product_analytics_move_funnels
- 1
- - product_analytics_post_push
- 1
- - product_analytics_sync_funnels

View File

@ -127,6 +127,7 @@ module.exports = {
// TODO: backport these inset box shadows to GitLab UI
'inner-1-gray-100': 'inset 0 0 0 1px var(--gray-100, #dcdcde)',
'inner-b-1-gray-100': 'inset 0 -1px 0 0 var(--gray-100, #dcdcde)',
'inner-1-gray-200': 'inset 0 0 0 1px var(--gray-200, #bfbfc3)',
'inner-l-4-gray-100': 'inset 4px 0 0 0 var(--gray-100, #dcdcde)',
'inner-1-red-400': 'inset 0 0 0 1px var(--red-400, #ec5941)',

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true

# See https://docs.gitlab.com/ee/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.

# Adds a composite btree index on (file_name, file_sha256) to
# packages_package_files so package files can be looked up by
# name and checksum together.
class AddFileNameFileSha256IndexToPackageFiles < Gitlab::Database::Migration[2.2]
  milestone '17.1'

  # add_concurrent_index cannot run inside a DDL transaction.
  disable_ddl_transaction!

  INDEX_NAME = 'index_packages_package_files_on_file_name_file_sha256'

  def up
    add_concurrent_index :packages_package_files, 'file_name, file_sha256', name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :packages_package_files, name: INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
0e990557beda81144aef661a706563c90a30ed68b96861bde5c8a13c6e46125e

View File

@ -26709,6 +26709,8 @@ CREATE INDEX index_packages_package_file_build_infos_on_pipeline_id ON packages_
CREATE INDEX index_packages_package_files_on_file_name ON packages_package_files USING gin (file_name gin_trgm_ops);
CREATE INDEX index_packages_package_files_on_file_name_file_sha256 ON packages_package_files USING btree (file_name, file_sha256);
CREATE INDEX index_packages_package_files_on_file_store ON packages_package_files USING btree (file_store);
CREATE INDEX index_packages_package_files_on_id_for_cleanup ON packages_package_files USING btree (id) WHERE (status = 1);

View File

@ -227,14 +227,22 @@ the rate limiting in the settings. The settings in the Admin Area
take effect immediately, while setting the environment variable
requires a restart of all the Puma processes.
<!-- ## Troubleshooting
## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
one might have when setting this up, or when something is changed, or on upgrading, it's
important to describe those, too. Think of things that may go wrong and include them here.
This is important to minimize requests for support, and to avoid doc comments with
questions that you know someone might ask.
### Disable throttling after accidentally locking administrators out
Each scenario can be a third-level heading, for example `### Getting error message X`.
If you have none to add when creating a doc, leave this section in place
but commented out to help encourage others to add to it in the future. -->
If many users connect to GitLab through the same proxy or network gateway,
it is possible that, if a rate limit is too low, that limit will also lock administrators out,
because GitLab sees them using the same IP as the requests that triggered the throttling.
Administrators can use [the Rails console](../operations/rails_console.md) to disable the same limits as listed for
[the `GITLAB_THROTTLE_DRY_RUN` variable](#try-out-throttling-settings-before-enforcing-them).
For example:
```ruby
Gitlab::CurrentSettings.update!(throttle_authenticated_web_enabled: false)
```
In this example, the `throttle_authenticated_web` parameter has the `_enabled` name suffix.
To set numeric values for the limits, replace the `_enabled` name suffix with the `_period_in_seconds` and `_requests_per_period` suffixes.

View File

@ -80,10 +80,6 @@ four standard [pagination arguments](#pagination-arguments):
Retrieve the active add-on purchase. This query can be used in GitLab SaaS and self-managed environments.
DETAILS:
**Introduced** in GitLab 16.7.
**Status**: Experiment.
Returns [`AddOnPurchase`](#addonpurchase).
#### Arguments
@ -6593,6 +6589,30 @@ Input type: `MlModelCreateInput`
| <a id="mutationmlmodelcreateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationmlmodelcreatemodel"></a>`model` | [`MlModel`](#mlmodel) | Model after mutation. |
### `Mutation.mlModelDelete`
DETAILS:
**Introduced** in GitLab 17.0.
**Status**: Experiment.
Input type: `MlModelDeleteInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationmlmodeldeleteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationmlmodeldeleteid"></a>`id` | [`MlModelID!`](#mlmodelid) | Global ID of the model to be deleted. |
| <a id="mutationmlmodeldeleteprojectpath"></a>`projectPath` | [`ID!`](#id) | Project the model to mutate is in. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationmlmodeldeleteclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationmlmodeldeleteerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationmlmodeldeletemodel"></a>`model` | [`MlModel`](#mlmodel) | Model after mutation. |
### `Mutation.mlModelDestroy`
DETAILS:
@ -9127,10 +9147,6 @@ Input type: `UserAchievementsDeleteInput`
### `Mutation.userAddOnAssignmentBulkCreate`
DETAILS:
**Introduced** in GitLab 16.11.
**Status**: Experiment.
Input type: `UserAddOnAssignmentBulkCreateInput`
#### Arguments
@ -9151,10 +9167,6 @@ Input type: `UserAddOnAssignmentBulkCreateInput`
### `Mutation.userAddOnAssignmentBulkRemove`
DETAILS:
**Introduced** in GitLab 16.11.
**Status**: Experiment.
Input type: `UserAddOnAssignmentBulkRemoveInput`
#### Arguments
@ -9175,10 +9187,6 @@ Input type: `UserAddOnAssignmentBulkRemoveInput`
### `Mutation.userAddOnAssignmentCreate`
DETAILS:
**Introduced** in GitLab 16.3.
**Status**: Experiment.
Input type: `UserAddOnAssignmentCreateInput`
#### Arguments
@ -9200,10 +9208,6 @@ Input type: `UserAddOnAssignmentCreateInput`
### `Mutation.userAddOnAssignmentRemove`
DETAILS:
**Introduced** in GitLab 16.3.
**Status**: Experiment.
Input type: `UserAddOnAssignmentRemoveInput`
#### Arguments
@ -25716,6 +25720,7 @@ Describes where code is deployed for a project organized by folder.
| <a id="notecreatedat"></a>`createdAt` | [`Time!`](#time) | Timestamp of the note creation. |
| <a id="notediscussion"></a>`discussion` | [`Discussion`](#discussion) | Discussion the note is a part of. |
| <a id="noteid"></a>`id` | [`NoteID!`](#noteid) | ID of the note. |
| <a id="noteimported"></a>`imported` | [`Boolean`](#boolean) | Indicates whether the note was imported. |
| <a id="noteinternal"></a>`internal` | [`Boolean`](#boolean) | Indicates if the note is internal. |
| <a id="notelasteditedat"></a>`lastEditedAt` | [`Time`](#time) | Timestamp when note was last edited. |
| <a id="notelasteditedby"></a>`lastEditedBy` | [`UserCore`](#usercore) | User who last edited the note. |

View File

@ -489,6 +489,9 @@ prompt is built by AI Gateway).
## Embeddings
NOTE:
For the embedding database, see [RAG for GitLab Duo](../gitlab_duo_rag/index.md).
Embeddings can be requested for all features in a single endpoint, for
example through a request like this:

View File

@ -0,0 +1,41 @@
---
status: proposed
creation-date: "2024-01-25"
authors: [ "@shinya.maeda", "@mikolaj_wawrzyniak" ]
coach: [ "@stanhu" ]
approvers: [ "@pwietchner", "@oregand", "@tlinz" ]
owning-stage: "~devops::ai-powered"
participating-stages: ["~devops::data stores", "~devops::create"]
---
# Elasticsearch
For more information on Elasticsearch and RAG broadly, see the [Elasticsearch article](../gitlab_rag/elasticsearch.md) in [RAG at GitLab](../gitlab_rag/index.md).
## Retrieve GitLab Documentation
A [proof of concept](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/145392) was done to switch the documentation embeddings from being stored in the embedding database to being stored on Elasticsearch.
### Synchronizing embeddings with data source
The same procedure used by [PostgreSQL](postgresql.md) can be followed to keep the embeddings up to date in Elasticsearch.
### Retrieval
To get the nearest neighbours, the following query can be executed against an index containing the embeddings:
```ruby
{
"knn": {
"field": vector_field_containing_embeddings,
"query_vector": embedding_for_question,
"k": limit,
"num_candidates": number_of_candidates_to_compare
}
}
```
### Requirements to get to self-managed
- Productionalize the PoC [MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/145392)
- Get more self-managed instances to install Elasticsearch by [shipping GitLab with Elasticsearch](https://gitlab.com/gitlab-org/gitlab/-/issues/438178). Elastic gave their approval to ship with the free license. The work required for making it easy for customers to host Elasticsearch is more than 2 milestones.

View File

@ -0,0 +1,37 @@
---
status: proposed
creation-date: "2024-01-25"
authors: [ "@shinya.maeda", "@mikolaj_wawrzyniak" ]
coach: [ "@stanhu" ]
approvers: [ "@pwietchner", "@oregand", "@tlinz" ]
owning-stage: "~devops::ai-powered"
participating-stages: ["~devops::data stores", "~devops::create"]
---
# Retrieval Augmented Generation (RAG) for GitLab Duo on self-managed
RAG is an application architecture used to provide knowledge to a large language model that doesn't exist in its training set, so that it can use that knowledge to answer user questions. To learn more about RAG, see [RAG for GitLab](../gitlab_rag/index.md).
## Goals of this blueprint
This blueprint aims to drive a decision for a RAG solution for GitLab Duo on self-managed, specifically for shipping GitLab Duo with access to GitLab documentation. We outline three potential solutions, including PoCs for each to demonstrate feasibility for this use case.
## Constraints
- The solution must be viable for self-managed customers to run and maintain
- The solution must be shippable in 1-2 milestones <!-- I don't actually know that this is true, just adding an item for time constraint -->
- The solution should be low-lock-in, since we are still determining our long term technical solution(s) for RAG at GitLab
## Proposals for GitLab Duo Chat RAG for GitLab documentation
The following solutions have been proposed and evaluated for the GitLab Duo Chat for GitLab documentation use case:
- [Vertex AI Search](vertex_ai_search.md)
- [Elasticsearch](elasticsearch.md)
- [PostgreSQL with PGVector extension](postgresql.md)
You can read more about how each evaluation was conducted in the links above.
## Chosen solution
[Vertex AI Search](vertex_ai_search.md) is going to be implemented due to the low lock-in and being able to reach customers quickly. It could be moved over to another solution in the future.

View File

@ -0,0 +1,95 @@
---
status: proposed
creation-date: "2024-01-25"
authors: [ "@shinya.maeda", "@mikolaj_wawrzyniak" ]
coach: [ "@stanhu" ]
approvers: [ "@pwietchner", "@oregand", "@tlinz" ]
owning-stage: "~devops::ai-powered"
participating-stages: ["~devops::data stores", "~devops::create"]
---
# PostgreSQL
## Retrieve GitLab Documentation
PGVector is currently being used for the retrieval of relevant documentation for GitLab Duo chat's RAG.
A separate `embedding` database runs alongside `geo` and `main` which has the `pg-vector` extension installed and contains embeddings for GitLab documentation.
- Statistics (as of January 2024):
- Data type: Markdown written in natural language (Unstructured)
- Data access level: Green (No authorization required)
- Data source: `https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc`
- Data size: 147 MB in `vertex_gitlab_docs`. 2194 pages.
- Service: `https://docs.gitlab.com/` ([source repo](https://gitlab.com/gitlab-org/gitlab-docs))
- Example of user input: "How do I create an issue?"
- Example of expected AI-generated response: "To create an issue:\n\nOn the left sidebar, select Search or go to and find your project.\n\nOn the left sidebar, select Plan > Issues, and then, in the upper-right corner, select New issue."
### Synchronizing embeddings with data source
Here is the overview of synchronizing process that is currently running in GitLab.com:
1. Load documentation files of the GitLab instance. i.e. `doc/**/*.md`.
1. Compare the checksum of each file to detect new, updated, or deleted documents.
1. If a doc is added or updated:
1. Split the docs with the following strategy:
- Text splitter: Split by new lines (`\n`). Subsequently split by 100~1500 chars.
1. Bulk-fetch embeddings of the chunks from `textembedding-gecko` model (768 dimensions).
1. Bulk-insert the embeddings into the `vertex_gitlab_docs` table.
1. Cleanup the older embeddings.
1. If a doc is deleted:
1. Delete embeddings of the page.
As of today, there are 17345 rows (chunks) on `vertex_gitlab_docs` table on GitLab.com.
For Self-managed instances, we serve embeddings from AI Gateway and GCP's Cloud Storage,
so the above process can be simpler:
1. Download an embedding package from Cloud Storage through AI Gateway API.
1. Bulk-insert the embeddings into the `vertex_gitlab_docs` table.
1. Delete older embeddings.
We generate this embeddings package before GitLab monthly release.
Sidekiq cron worker automatically renews the embeddings by comparing the embedding version and the GitLab version.
If it's outdated, it will download the new embedding package.
Going further, we can consolidate the business logic between SaaS and Self-managed by generating the package every day (or every grpd deployment).
This is to reduce the points of failure in the business logic and let us easily reproduce an issue reported by Self-managed users.
Here is the current table schema:
```sql
CREATE TABLE vertex_gitlab_docs (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
version integer DEFAULT 0 NOT NULL, -- For replacing the old embeddings by new embeddings (e.g. when doc is updated)
embedding vector(768), -- Vector representation of the chunk
url text NOT NULL,
content text NOT NULL, -- Chunked data
metadata jsonb NOT NULL, -- Additional metadata e.g. page URL, file name
CONSTRAINT check_2e35a254ce CHECK ((char_length(url) <= 2048)),
CONSTRAINT check_93ca52e019 CHECK ((char_length(content) <= 32768))
);
CREATE INDEX index_vertex_gitlab_docs_on_version_and_metadata_source_and_id ON vertex_gitlab_docs USING btree (version, ((metadata ->> 'source'::text)), id);
CREATE INDEX index_vertex_gitlab_docs_on_version_where_embedding_is_null ON vertex_gitlab_docs USING btree (version) WHERE (embedding IS NULL);
```
### Retrieval
After the embeddings are ready, GitLab-Rails can retrieve chunks in the following steps:
1. Fetch embedding of the user input from `textembedding-gecko` model (768 dimensions).
1. Query to `vertex_gitlab_docs` table for finding the nearest neighbors. e.g.:
```sql
SELECT *
FROM vertex_gitlab_docs
ORDER BY vertex_gitlab_docs.embedding <=> '[vectors of user input]' -- nearest neighbors by cosine distance
LIMIT 10
```
### Requirements to get to self-managed
All instances of GitLab have postgres running but allowing instances to administer a separate database for embeddings or combining the embeddings into the main database would require some effort which spans more than a milestone.

View File

@ -0,0 +1,134 @@
---
status: proposed
creation-date: "2024-01-25"
authors: [ "@shinya.maeda", "@mikolaj_wawrzyniak" ]
coach: [ "@stanhu" ]
approvers: [ "@pwietchner", "@oregand", "@tlinz" ]
owning-stage: "~devops::ai-powered"
participating-stages: ["~devops::data stores", "~devops::create"]
---
# Vertex AI Search
## Retrieve GitLab Documentation
- Statistics (as of January 2024):
- Data type: Markdown (Unstructured) written in natural language
- Data access level: Green (No authorization required)
- Data source: `https://gitlab.com/gitlab-org/gitlab/-/blob/master/doc`
- Data size: approx. 56,000,000 bytes. 2194 pages.
- Service: `https://docs.gitlab.com/` ([source repo](https://gitlab.com/gitlab-org/gitlab-docs))
- Example of user input: "How do I create an issue?"
- Example of expected AI-generated response: "To create an issue:\n\nOn the left sidebar, select Search or go to and find your project.\n\nOn the left sidebar, select Plan > Issues, and then, in the upper-right corner, select New issue."
[The GitLab documentation](https://gitlab.com/gitlab-org/gitlab-docs/-/blob/main/doc/architecture.md) is the SSoT service to serve GitLab documentation for SaaS (both GitLab.com and Dedicated) and Self-managed.
When a user accesses a documentation link in a GitLab instance,
they are [redirected to the service](https://gitlab.com/groups/gitlab-org/-/epics/11600#note_1690083049) since 16.0 (except air-gapped solutions).
In addition, the current search backend of `docs.gitlab.com` needs to transition to [Vertex AI Search](https://cloud.google.com/enterprise-search?hl=en). See [this issue](https://gitlab.com/gitlab-com/legal-and-compliance/-/issues/1876) (GitLab member only) for more information.
We introduce a new semantic search API powered by Vertex AI Search for the documentation tool of GitLab Duo Chat.
### Setup in Vertex AI Search
We [create a search app](https://cloud.google.com/generative-ai-app-builder/docs/create-engine-es) for each GitLab versions.
These processes will likely be automated in the [GitLab Documentation project](https://gitlab.com/gitlab-org/gitlab-docs/-/blob/main/doc/architecture.md)
by CI/CD pipelines.
1. Create a new Bigquery table e.g. `gitlab-docs-latest` or `gitlab-docs-v16.4`
1. Download documents from repositories (e.g. `gitlab-org/gitlab/doc`, `gitlab-org/gitlab-runner/docs`, `gitlab-org/omnibus-gitlab/doc`).
1. Split them by Markdown headers and generate metadata (e.g. URL and title).
1. Insert rows into the Bigquery table.
1. [Create a search app](https://cloud.google.com/generative-ai-app-builder/docs/create-engine-es)
See [this notebook](https://colab.research.google.com/drive/1XxYPWkNBnwZ0UG1aJ0Pjb2gfYmLnrHft?usp=sharing) for more implementation details.
The data of the latest version will be refreshed by a nightly build with [Data Store API](https://cloud.google.com/generative-ai-app-builder/docs/reference/rpc).
### AI Gateway API
API design is following the existing patterns in [AI Gateway](../ai_gateway/index.md).
```plaintext
POST /v1/search/docs
```
```json
{
"type": "search",
"metadata": {
"source": "GitLab EE",
"version": "16.3" // Used for switching search apps for older GitLab instances
},
"payload": {
"query": "How can I create an issue?",
"params": { // Params for Vertex AI Search
"page_size": 10,
"filter": "",
},
"provider": "vertex-ai"
}
}
```
The response will include the search results. For example:
```json
{
"response": {
"results": [
{
"id": "d0454e6098773a4a4ebb613946aadd89",
"content": "\nTo create an issue from a group: \n1. On the left sidebar, ...",
"metadata": {
"Header1": "Create an issue",
"Header2": "From a group",
"url": "https://docs.gitlab.com/ee/user/project/issues/create_issues.html"
}
}
]
},
"metadata": {
"provider": "vertex-ai"
}
}
```
See [SearchRequest](https://cloud.google.com/python/docs/reference/discoveryengine/latest/google.cloud.discoveryengine_v1.types.SearchRequest) and [SearchResponse](https://cloud.google.com/python/docs/reference/discoveryengine/latest/google.cloud.discoveryengine_v1.types.SearchResponse) for Vertex AI API specs.
### Proof of Concept
- [GitLab-Rails MR](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144719)
- [AI Gateway MR](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/-/merge_requests/642)
- [Vertex AI Search service](https://console.cloud.google.com/gen-app-builder/engines?referrer=search&project=ai-enablement-dev-69497ba7)
- [Google Colab notebook](https://colab.research.google.com/drive/1XxYPWkNBnwZ0UG1aJ0Pjb2gfYmLnrHft?usp=sharing)
- [Demo video](https://youtu.be/ipEpMt-U6rQ?feature=shared) (Note: In this video, Website URLs are used as data source).
#### Evaluation score
Here is the evaluation scores generated by [Prompt Library](https://gitlab.com/gitlab-org/modelops/ai-model-validation-and-research/ai-evaluation/prompt-library).
|Setup|correctness|comprehensiveness|readability|evaluating_model|
|---|---|---|---|---|
|New (w/ Vertex AI Search)|3.7209302325581382|3.6976744186046511|3.9069767441860455|claude-2|
|Current (w/ Manual embeddings in GitLab-Rails and PgVector)|3.7441860465116279|3.6976744186046511|3.9767441860465116|claude-2|
<details>
<summary>Dataset</summary>
- Input Bigquery table: `dev-ai-research-0e2f8974.duo_chat_external.documentation__input_v1`
- Output Bigquery table:
- `dev-ai-research-0e2f8974.duo_chat_external_results.sm_doc_tool_vertex_ai_search`
- `dev-ai-research-0e2f8974.duo_chat_external_results.sm_doc_tool_legacy`
- Command: `promptlib duo-chat eval --config-file /eval/data/config/duochat_eval_config.json`
</details>
### Estimated Time of Completion
- Milestone N:
- Setup in Vertex AI Search with CI/CD automation.
- Introduce `/v1/search/docs` endpoint in AI Gateway.
- Updates the retrieval logic in GitLab-Rails.
- Feature flag clean up.
Total milestones: 1

View File

@ -0,0 +1,221 @@
---
status: proposed
creation-date: "2024-02-20"
authors: [ "@bvenker", "@mikolaj_wawrzyniak" ]
coach: [ "@stanhu" ]
approvers: [ "@pwietchner", "@oregand", "@shinya.meda", "@mikolaj_wawrzyniak" ]
owning-stage: "~devops::data stores"
participating-stages: ["~devops::ai-powered", "~devops::create"]
---
# Elasticsearch
Elasticsearch is a search engine and data store which allows generating, storing and querying vectors and performing keyword and semantic search at scale.
Elasticsearch employs a distributed architecture, where data is stored across multiple nodes. This allows for parallel processing of queries, ensuring fast results even with massive datasets.
## Using Elasticsearch as a vector store
Elasticsearch can be used to store embedding vectors up to 4096 dimensions and find the closest neighbours for a given embedding.
![Elasticsearch as vector store](img/elasticsearch_vector_store.png)
### Licensing
Does not require a paid license.
### Indexing embeddings
For every document type (e.g. `gitlab_documentation`), an index is created and stores the original source, embeddings and optional metadata such as URL. An initial backfill is required to index all current documents and a process to upsert or delete documents as the source changes.
For GitLab Duo Documentation, the current async process for generating and storing embeddings in the embeddings database can be altered to index into Elasticsearch.
Using the Advanced Search framework, database records are automatically kept up to date in Elasticsearch. [Issue 442197](https://gitlab.com/gitlab-org/gitlab/-/issues/442197) proposes changing the Elasticsearch framework to allow for other datasets to be indexed.
For documents with large sources that need to be split into chunks, [nested kNN search](https://www.elastic.co/guide/en/elasticsearch/reference/8.12/knn-search.html#nested-knn-search) can be used whereby a single top-level document contains nested objects each with a source and embedding. This enables searching for the top K documents with the most relevant chunks. It is not suited for cases where the top k chunks need to be searched within a single document. In such cases, every chunk should be stored as a separate document.
### Querying context-relevant information
A given question is passed to a model to generate embeddings. The vector is then sent to Elasticsearch to find the most relevant documents.
### Generation
The N most relevant documents are added to a prompt which is sent to an LLM to generate an answer for the original question.
## RAG in Elasticsearch using hosted models
Similar to the above but the question's embeddings are generated from within Elasticsearch.
![RAG overview](img/elasticsearch_rag_hosted_models.png)
### Licensing
Requires a paid license on every cluster.
### Model hosting
Requires model(s) used to be hosted on every cluster which adds effort and cost.
Elasticsearch supports the following models:
- ELSER (Elastic Learned Sparse Encoder): Built-in model provided by Elasticsearch used to generate text embeddings for semantic search.
- TensorFlow Models: Custom TensorFlow models can be deployed for semantic search using the ML APIs.
- Third-Party Models: Elasticsearch supports deploying models from Hugging Face and other providers. This provides access to a wider range of pre-trained models, but deployment and maintenance requires additional work.
## Hybrid Search
Hybrid search combines text and semantic search to return the most relevant sources. A reranker could be used to combine the results from both methods.
![Hybrid search](img/elasticsearch_hybrid_search.png)
### Advanced text search features of Elasticsearch
1. Inverted Indexing: At its core, Elasticsearch relies on a powerful data structure called an inverted index. This index essentially flips the traditional approach, where each document contains a list of words. Instead, the inverted index catalogues every unique word across all documents and tracks where it appears in each one. This enables lightning-fast searches by finding relevant documents based on matching words instantly.
1. Advanced Text Analysis: Elasticsearch doesn't simply match whole words. It leverages text analyzers to break down and understand text intricacies. This includes handling:
- Stemming and lemmatization: Reducing words to their root form (e.g., "running" and "ran" both matching "run").
- Synonyms and related terms: Recognizing synonyms and similar words to expand search results.
- Stop words: Ignoring common words like "the" and "a" that don't contribute much to meaning.
- Custom analysis: Defining your own rules for specific domains or languages.
1. Powerful Query Capabilities: Elasticsearch goes beyond basic keyword searches. It supports complex queries using Boolean operators (AND, OR, NOT), proximity searches (finding words close together), fuzzy searches (handling typos), and more. You can also filter results based on other criteria alongside text matching.
### Reranking
Elasticsearch currently supports [Reciprocal rank fusion (RRF)](https://www.elastic.co/guide/en/elasticsearch/reference/current/rrf.html) which works out-the-box. They also released [Learning to Rank](https://elasticsearch-learning-to-rank.readthedocs.io/en/latest/) which uses ML to improve ranking.
## Running Elasticsearch
Elasticsearch is available on GitLab.com and can be integrated on Dedicated and Self-Managed instances. To use as a vector store only:
- [Install Elasticsearch version `8.12`](../../../integration/advanced_search/elasticsearch.md#install-elasticsearch) or upgrade to at least version `8.12`.
- Add URL, Username and Password on the Advanced Search settings page: `admin/application_settings/advanced_search`
After the integration is configured, instance admins don't need to do further work to use it as a vector store since the GitLab Elasticsearch framework handles setting mappings, settings and indexing data.
## Supported dimensions
Elasticsearch can store up to 4096 dimensions and OpenSearch up to 16000 dimensions, compared to `pg_vector` which can store up to 2000.
## Limitations
### Licensing
In order to use the ML capabilities offered by Elastic, every cluster has to have a valid license.
If Elastic is used only as a vector store and all embeddings generated outside of Elastic, a license is not required.
### Adoption
The Elastic integration is available to all GitLab instances to unlock Advanced Search but not all instances have chosen to run the integration. There is also an additional cost for every instance to host the integration.
## Performance and scalability
Elasticsearch is horizontally scalable and handles storing and querying at scale. An Elasticsearch cluster consists of multiple nodes each contributing resources.
## Cost
Elastic Cloud pricing for GitLab Documentation vector storage is about $38 per month and the price scales with storage requirements.
## Elasticsearch vs. OpenSearch
### Features
Both offer storing vector embeddings and similarity search (kNN).
Elasticsearch supports custom TensorFlow models which OpenSearch does not offer. Both offer pre-trained models.
The APIs for kNN searching differ slightly between the two platforms but work in the same way.
### Supported platforms
Currently GitLab offers Advanced Search for both Elasticsearch and OpenSearch due to parity between the text search APIs. If both are supported for AI features, there would be a need to adapt to two different AI APIs.
## PoC: Repository X Ray
To test the viability of Elasticsearch for generating embeddings, a PoC was done with Repository X Ray project.
Repository X Ray hasn't yet implemented any semantic search and this section is based solely on a [prototype implementation](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144715)
- Statistics (as of February 2024):
- Data type: JSON document with source code libraries descriptions in natural language
  - Data access level: Red (each JSON document belongs to a specific project, and data access rules should adhere to the data access rules configured for that project)
- Data source: Repository X Ray report CI artifact
- Data size: N/A
- Example of user input: "# generate function that fetches sales report for vendor from App Store"
- Example of expected AI-generated response:
```python
def sales_reports(vendor_id)\n app_store_connect.sales_reports(\n filter: {\n report_type: 'SALES',\n report_sub_type: 'SUMMARY',\n frequency: 'DAILY',
vendor_number: '123456'\n }\n)\nend
```
### Synchronizing embeddings with data source
In a similar manner as with the [documentation example](../gitlab_duo_rag/elasticsearch.md#retrieve-gitlab-documentation) Repository X Ray report data is a derivative. It uses an underlying repository source code as a base, and it must be synchronised with it, whenever any changes to the source code occurs.
Right now there is no synchronisation mechanism that includes embeddings and vector storage. However there is an existing pipeline that generates and stores Repository X Ray reports.
The ingestion pipeline is performed in following steps:
1. A CI X Ray scanner job is triggered - a documentation [page](../../../user/project/repository/code_suggestions/repository_xray.md#enable-repository-x-ray) suggests limiting this job to be executed only when changes occur to the main repository branch. However, repository maintainers may configure trigger rules differently.
   - An X Ray [scanner](https://gitlab.com/gitlab-org/code-creation/repository-x-ray) locates and processes one of the supported [dependencies files](../../../user/project/repository/code_suggestions/repository_xray.md#supported-languages-and-package-managers), producing JSON report files.
1. After the X Ray scanner job finishes successfully, a [background job](https://gitlab.com/gitlab-org/gitlab/-/blob/c6b2f18eaf0b78a4e0012e88f28d643eb0dfb1c2/ee/app/workers/ai/store_repository_xray_worker.rb#L18) is triggered in the GitLab Rails monolith that imports the JSON report into [`Projects::XrayReport`](https://gitlab.com/gitlab-org/gitlab/-/blob/bc2ad40b4b026dd359e289cf2dc232de1a2d3227/ee/app/models/projects/xray_report.rb#L22).
- There can be only one Repository X Ray report per project in the scope of programming language, duplicated records are being upserted during import process
As of today, there are 84 rows on `xray_reports` table on GitLab.com.
### Retrieval
After the Repository X Ray report gets imported, when the IDE extension sends a request for a [code generation](../../../user/project/repository/code_suggestions/index.md),
Repository X Ray report is retrieved in the following steps:
1. The GitLab Rails monolith fetches the corresponding `xray_reports` record from the main database. `xray_reports` records are filtered based on the `project_id` foreign key and the `lang` column.
1. From the retrieved record, the first 50 dependencies are added into a prompt that is forwarded to the AI Gateway.
### Current state overview
```mermaid
sequenceDiagram
actor USR as User
participant IDE
participant GLR as GitLabRails
participant RN as GitLabRunner
participant PG as GitLabPsqlMainDB
participant AIGW as AIGateway
USR->>+GLR: commits changes to Gemfile.lock
GLR->>RN: triggers Repository X Ray CI scanner job
RN->>GLR: Repository X Ray report
GLR->>GLR: triggers Repository X Ray ingestion job
GLR->>-PG: upserts xray_reports record
USR->>+IDE: types: "#35; generate function that fetches sales report for vendor from App Store"
IDE->>+GLR: trigger code generation for line ` "#35; generate function `
GLR->>PG: fetch X Ray report for project and language
PG->>GLR: xray_reports record
GLR->>GLR: include first 50 entities from xray report into code generation prompt
GLR->>-AIGW: trigger code generation ` "#35; generate function `
```
### Embeddings prospect application
As described in the retrieval section above, currently Repository X Ray reports follow a very naive approach that does not include any metric for assessing relevance between Repository X Ray report content and
user instruction. Therefore applying embeddings and semantic search to X Ray report has a high potential of improving results by selecting limited set of related entries from Repository X Ray report based on user instruction.
To achieve that, embeddings should be generated during Repository X Ray ingestion. Additionally, a user instruction should be turned into an embeddings vector to perform semantic search over stored Repository X Ray report data during the retrieval process.
### Elasticsearch and PGVector comparison
Following paragraph is a result of [PoC](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/144715) work.
From a product feature implementation point of view, both solutions seem viable, offering in their current state all the necessary tools to support the product feature requirements.
Given the Elasticsearch built in capabilities it is acknowledged that it might bring better long term support, enabling more powerful RAG solution in the future than `pg_vector` based ones.
The current Elasticsearch integration only indexes `ActiveRecord` models and
source code from Git repositories. Further work required to build more
generic abstractions to index other data (eg. X-Ray reports)
has been defined by [issue 442197](https://gitlab.com/gitlab-org/gitlab/-/issues/442197).
To prevent suboptimal workarounds of existing limitation
which will create technical debt, it is advised that [issue 442197](https://gitlab.com/gitlab-org/gitlab/-/issues/442197)
is completed before Elasticsearch is selected as main vector storage for RAG.

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 112 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 115 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 70 KiB

View File

@ -0,0 +1,166 @@
---
status: proposed
creation-date: "2024-02-20"
authors: [ "@bvenker", "@mikolaj_wawrzyniak" ]
coach: [ "@stanhu" ]
approvers: [ "@pwietchner", "@oregand", "@shinya.meda", "@mikolaj_wawrzyniak" ]
owning-stage: "~devops::data stores"
participating-stages: ["~devops::ai-powered", "~devops::create"]
---
# Retrieval Augmented Generation (RAG) for GitLab
## Goals
The goal of this blueprint is to describe viable options for RAG at GitLab across deployment types. The aim is to describe RAG implementations that provide our AI features—and by extension our customers—with best-in-class user experiences.
## Overview of RAG
RAG, or Retrieval Augmented Generation, involves several key process blocks:
- **Input Transformation**: This step involves processing the user's input, which can vary from natural language text to JSON or keywords. For effective query construction, we might utilize Large Language Models (LLMs) to format the input into a standard expected format or to extract specific keywords.
- **Retrieval**: Here, we fetch relevant data from specified data sources, which may include diverse storage engines like vector, graph, or relational databases. It's crucial to conduct [data access checks](#data-access-policy) during this phase. After retrieval, the data should be optimized for LLMs through post-processing to enhance the quality of the generated responses.
- **Generation**: This phase involves crafting a prompt with the retrieved data and submitting it to an LLM, which then generates an AI-powered response.
![Current page](img/blog_figure-1.jpg)
(Image from [Deconstructing RAG](https://blog.langchain.dev/deconstructing-rag/))
## Challenges of RAG
### Data for LLMs
Ensuring data is optimized for LLMs is crucial for consistently generating high-quality AI responses. Several challenges exist when providing context to LLMs:
- **Long Contexts:** Extensive contexts can degrade LLM performance, a phenomenon known as the Lost in the Middle problem. Employing Rerankers can enhance performance but may also increase computational costs due to longer processing times.
- **Duplicate Contents:** Repetitive content can reduce the diversity of search results. For instance, if a semantic search yields ten results indicating "Tom is a president" but the eleventh reveals "Tom lives in the United States," solely using the top ten would omit critical information. Filtering out duplicate content, for example, through Maximal Marginal Relevance (MMR), can mitigate this issue.
- **Conflicting Information:** Retrieving conflicting data from multiple sources can lead to LLM "hallucinations." For example, mixing sources that define "RAG" differently can confuse the LLM. Careful source selection and content curation are essential.
- **Irrelevant Content:** Including irrelevant data can negatively impact LLM performance. Setting a threshold for relevance scores or considering that certain irrelevant contents might actually enhance output quality are strategies to address this challenge.
It's highly recommended to evaluate the optimal data format and size for maximizing LLM performance, as the effects on performance and result quality can vary significantly based on the data's structure.
References:
- [Benchmarking Methods for Semi-Structured RAG](https://youtu.be/KMZZh7Z5mno?si=-Gr-acXcjg7QXmBU)
- [Edge cases of semantic search](https://youtu.be/DY3sT4yIezs?feature=shared&t=1382)
#### Regenerating Embeddings
The AI field is evolving rapidly and new models and approaches seem to appear daily that could improve our users' experience, so we want to be conscious of model switching costs. If we decide to swap models or change our chunking strategy (as two examples), we will need to wipe our existing embeddings and do a full replacement with embeddings from the new model or with the new text chunks, etc. Factors to consider which could trigger the need for a full regeneration of embeddings for the affected data include:
- A change in the optimal text chunk size
- A change in a preprocessing step which perhaps adds new fields to a text chunk
- Exclusion of content, such as the removal of a field that was previously embedded
- Addition of new metadata that needs to be embedded
### Multi-source Retrieval
Addressing complex queries may require data from multiple sources. For instance, queries linking issues to merge requests necessitate fetching details from both. GitLab Duo Chat, utilizing the [ReACT framework](https://arxiv.org/abs/2210.03629), sequentially retrieves data from PostgreSQL tables, which can prolong the retrieval process due to the sequential execution of multiple tools and LLM inferences.
## Searching for Data
Choosing the appropriate search method is pivotal for feature design and UX optimization. Here are common search techniques:
### Semantic Search
Semantic search shines when handling complex queries that demand an understanding of the context or intent behind the words, not just the words themselves. It's particularly effective for queries expressed in natural language, such as full sentences or questions, where the overall meaning outweighs the importance of specific keywords. Semantic search excels at providing thorough coverage of a topic, capturing related concepts that may not be directly mentioned in the query, thus uncovering more nuanced or indirectly related information.
In the realm of semantic search, the K-Nearest Neighbors (KNN) method is commonly employed to identify data segments that are semantically closer to the user's input. To measure the semantic proximity, various methods are used:
- **Cosine Similarity:** Focuses solely on the direction of vectors.
- **L2 Distance (Euclidean Distance):** Takes into account both the direction and magnitude of vectors.
These vectors, known as "embeddings," are created by processing the data source through an embedding model. Currently, in GitLab production, we utilize the `textembedding-gecko` model provided by Vertex AI. However, there might be scenarios where you consider using alternative embedding models, such as those available on HuggingFace, to reduce costs. Opting for different models requires comprehensive evaluation and consultation, particularly with the legal team, to ensure the chosen model's usage complies with GitLab policies. See the [Security, Legal, and Compliance](https://gitlab.com/gitlab-org/gitlab/-/blob/52f4fcb033d13f3d909a777728ba8f3fa2c93256/doc/architecture/blueprints/gitlab_duo_rag/index.md#security-legal-and-compliance) section for more details. It's also important to note that multilingual support can vary significantly across different embedding models, and switching models may lead to regressions.
For large datasets, it's advisable to implement indexes to enhance query performance. The HNSW (Hierarchical Navigable Small World) method, combined with approximate nearest neighbors (ANN) search, is a popular strategy for this purpose. For insights into HNSW's effectiveness, consider reviewing [benchmarks on its performance in large-scale applications](https://supabase.com/blog/increase-performance-pgvector-hnsw).
### Keyword Search
Keyword search is the go-to method for straightforward, specific queries where users are clear about their search intent and can provide precise terms or phrases. This method is highly effective for retrieving exact matches, making it suitable for searches within structured databases or when looking for specific documents, terms, or phrases.
Keyword search operates on the principle of matching the query terms directly with the content in the database or document collection, prioritizing results that have a high frequency of the query terms. Its efficiency and directness make it particularly useful for situations where users expect quick and precise results based on specific keywords or phrases.
### Hybrid Search
Hybrid search combines the depth of semantic search with the precision of keyword search, offering a comprehensive search solution that caters to both context-rich and specific queries. By running both semantic and keyword searches simultaneously, it integrates the strengths of both methods—semantic search's ability to understand the context and keyword search's precision in identifying exact matches.
The results from both searches are then combined, with their relevance scores normalized to provide a unified set of results. This approach is particularly effective in scenarios where queries may not be fully served by either method alone, offering a balanced and nuanced response to complex search needs. The computational demands of kNN searches, which are part of semantic search, are contrasted with the relative efficiency of [BM25](https://medium.com/@evertongomede/understanding-the-bm25-ranking-algorithm-19f6d45c6ce) keyword searches, making hybrid search a strategic choice for optimizing performance across diverse datasets.
### Code Search
Like the other data types above, a source code search task can utilize different search types, each more suited to address different queries. Currently, [Zoekt](../code_search_with_zoekt/index.md) is employed on GitLab.com to provide exact match keyword search and regular expression search capabilities for source code. Semantic search and hybrid search functionalities are yet to be implemented for code.
### ID Search
Facilitates data retrieval using specific resource IDs, such as issue links. For example retrieving data from the specified resource ID, such as an Issue link or a shortcut. See [ID search](postgresql.md#id-search) for more information.
### Knowledge Graph
Knowledge Graph search transcends the limitations of traditional search methods by leveraging the interconnected nature of data represented in graph form. Unlike semantic search, which focuses on content similarity, Knowledge Graph search understands and utilizes the relationships between different data points, providing a rich, contextual exploration of data.
This approach is ideal for queries that benefit from understanding the broader context or the interconnectedness of data entities. Graph databases store relationships alongside the data, enabling complex queries that can navigate these connections to retrieve highly contextual and nuanced information.
Knowledge Graphs are particularly useful in scenarios requiring deep insight into the relationships between entities, such as recommendation systems, complex data analysis, and semantic querying, offering a dynamic way to explore and understand large, interconnected datasets.
## Security, Legal and Compliance
### Data access policy
The retrieval process must comply with the [GitLab Data Classification Standard](https://handbook.gitlab.com/handbook/security/data-classification-standard/). If the user doesn't have access to the data, GitLab will not fetch the data for building a prompt.
For example:
- When the data is GitLab Documentation (GREEN level), the data can be fetched without authorizations.
- When the data is customer data such as issues, merge requests, etc (RED level), the data must be fetched with proper authorizations based on permissions and roles.
If you're proposing to fetch data from an external public database (e.g. fetching data from `arxiv.org` so the LLM can answer questions about quantitative biology), please conduct a thorough review to ensure the external data isn't inappropriate for GitLab to process.
### Data usage
Using a new embedding model or persisting data into a new storage would require [legal reviews](https://handbook.gitlab.com/handbook/legal/). See the following links for more information:
- [Data privacy](../../../user/ai_data_usage.md#data-privacy)
- [Data retention](../../../user/ai_data_usage.md#data-retention)
- [Training data](../../../user/ai_data_usage.md#training-data)
## Evaluation
Evaluation is a crucial step in objectively determining the quality of the retrieval process. Tailoring the retrieval process based on specific user feedback can lead to biased optimizations, potentially causing regressions for other users. It's essential to have a dedicated test dataset and tools for a comprehensive quality assessment. For assistance with AI evaluation, please reach out to the [AI Model Validation Group](https://handbook.gitlab.com/handbook/engineering/development/data-science/model-validation/).
## Before Implementing RAG
Before integrating Retrieval Augmented Generation (RAG) into your system, it's important to evaluate whether it enhances the quality of AI-generated responses. Consider these essential questions:
- **What does typical user input look like?**
- For instance, "Which class should we use to make an external HTTP request in this repository?"
- **What is the desired AI-generated response?**
- Example: "Within this repository, Class-A is commonly utilized for..."
- **What are the current responses from LLMs?** (This helps determine if the necessary knowledge is already covered by the LLM.)
- Example: Receiving a "Sorry, I don't have an answer for that." from the Anthropic Claude 2.1 model.
- **What data is required in the LLM's context window?**
- Example: The code for Class-A.
- **Consider the current search method used for similar tasks**. (Ask yourself: How would I currently search for this data with the tools at my disposal?)
- Example: Navigate to the code search page and look for occurrences of "http."
- **Have you successfully generated the desired AI response with sample data?** Experiment in a third-party prompt playground or Google Colab to test.
- **If contemplating semantic search**, it's **highly recommended** that you develop a prototype first to ensure it meets your specific retrieval needs. Semantic search may interpret queries differently than expected, especially when the data source lacks natural language context, such as uncommented code. In such cases, semantic search might not perform as well as traditional keyword search methods. Here's [an example prototype](https://colab.research.google.com/drive/1K1gf6FibV-cjlXvTJPboQJtjYcSsyYi2?usp=sharing) that demonstrates semantic search for CI job configurations.
## Evaluated Solutions
The following solutions have been validated with PoCs to ensure they meet the basic requirements of vector storage and retrieval for GitLab Duo Chat with GitLab documentation. Click the links to learn more about each solution's attributes that relate to RAG:
- [PostgreSQL with PGVector](postgresql.md)
- [Elasticsearch](elasticsearch.md)
- [Google Vertex](vertex_ai_search.md)
To read more about the [GitLab Duo Chat PoCs](../gitlab_duo_rag/index.md) conducted, see:
- [PGVector PoC](../gitlab_duo_rag/postgresql.md)
- [Elasticsearch PoC](../gitlab_duo_rag/elasticsearch.md)
- [Google Vertex PoC](../gitlab_duo_rag/vertex_ai_search.md)
## Proposed solution
_Disclaimer: This blueprint is in the first iteration and the chosen solutions could change._
Due to the existing framework and scalability of Elasticsearch, embeddings will be stored on Elasticsearch for large datasets such as [issues](https://gitlab.com/gitlab-org/gitlab/-/issues/451431), merge requests, etc. This will be used to perform [Hybrid Search](https://gitlab.com/gitlab-org/gitlab/-/issues/440424) but will also be useful for other features such as finding duplicates, similar results or categorizing documents.
[Vertex AI Search](../gitlab_duo_rag/vertex_ai_search.md) is going to be implemented to serve GitLab Duo documentation for self-managed instances.

View File

@ -0,0 +1,140 @@
---
status: proposed
creation-date: "2024-02-20"
authors: [ "@bvenker", "@mikolaj_wawrzyniak" ]
coach: [ "@stanhu" ]
approvers: [ "@pwietchner", "@oregand", "@shinya.meda", "@mikolaj_wawrzyniak" ]
owning-stage: "~devops::data stores"
participating-stages: ["~devops::ai-powered", "~devops::create"]
---
# PostgreSQL
This page explains how to retrieve data from PostgreSQL for [RAG](index.md).
## Semantic search
### Overview
1. Install [PgVector extension](#vector-store-with-pgvector) to the PostgreSQL database.
1. Add a `vector` column to a new or existing table.
1. Data <=> Embedding synchronization
1. Load data which you want to search from.
1. Pass the data to an embedding model and get an vector.
1. Set the vector to the `vector` column.
1. Retrieval
1. Pass the user input to an embedding model and get an vector.
1. Get the nearest neighbors to the user input vector e.g. `SELECT * FROM a_table ORDER BY vector_column <-> '<user-input-vector>' LIMIT 5;`
### Vector store with PgVector
To store the embeddings for semantic search, we need to add a vector store in GitLab PostgreSQL.
This vector store can be added by installing [PgVector extension](https://github.com/pgvector/pgvector) (Postgres 12+ is required).
A vector store is currently running on GitLab.com and it's separately hosted from the main/CI databases.
Our current architecture of having a separate database for embeddings is probably ideal. We don't gain much by combining them and, as PGVector is all new and will likely require a lot of experimenting to get performance at scale (today we only have a tiny amount of data in it), we'll have a lot more options to experiment with without impacting overall GitLab.com stability (if PGVector is on a separate database). Having a separate database is recommended because it allows for experimentation without impacting performance of the main database.
### Limitations
- It could be locked down to a specific embedding model, because you must specify the dimensions of the vector column.
- Vectors with up to 2,000 dimensions can be indexed.
### Performance and scalability implications
- Is there any guidance on how much data we can add to the PostgreSQL (regardless of the vector data or normal data)?
- Not really, as we do not usually just add data to the database, but rather it's a result of the instance being used. I don't see any specific [storage requirements](../../../install/requirements.md#storage). If the existing `vertex_gitlab_docs` table size is a good indicator, we probably can add this without causing much trouble, though having an option to opt-in or opt-out is preferable.
### Availability
- PostgreSQL is available in all GitLab installations (both CNG and Omnibus).
- Most major cloud providers have added PgVector to their offerings by now: Google Cloud SQL and Alloy DB, DigitalOcean, AWS RDS and Aurora, Azure Flexible and Cosmos, etc. There might be a case where customers would need to upgrade to versions that support PGVector.
## ID search
### Overview
1. Execute a few-shot prompts to extract a resource identifier from the user input.
- e.g. When user asks `Can you summarize #12312312?`, ResourceIdentifier is `12312312` as a GitLab-Issue.
1. Retrieve the record from the PostgreSQL. e.g. `Issue.find(12312312)`
1. Check if the user can read the resource.
1. Build a prompt with the retrieved data and passing it to an LLM to get a AI-generated response.
## PoC: Repository X Ray
Repository X Ray hasn't yet implemented any semantic search and this section is based solely on a [prototype implementation](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142912)
- Statistics (as of February 2024):
  - Data type: JSON document with source code library descriptions in natural language
  - Data access level: Red (each JSON document belongs to a specific project, and data access rules should adhere to the data access rules configured for that project)
- Data source: Repository X Ray report CI artifact
- Data size: N/A
- Example of user input: "# generate function that fetches sales report for vendor from App Store"
- Example of expected AI-generated response:
```python
def sales_reports(vendor_id)\n app_store_connect.sales_reports(\n filter: {\n report_type: 'SALES',\n report_sub_type: 'SUMMARY',\n frequency: 'DAILY',
vendor_number: '123456'\n }\n)\nend
```
### Synchronizing embeddings with data source
In a similar manner to the [documentation example](../gitlab_duo_rag/postgresql.md#retrieve-gitlab-documentation), Repository X Ray report data is a derivative. It uses an underlying repository source code as a base,
and it must be synchronized with it whenever any changes to the source code occur.
Right now there is no synchronisation mechanism that includes embeddings and vector storage. However there is an existing pipeline that generates and stores Repository X Ray reports
The ingestion pipeline is performed in the following steps:
1. A CI X Ray scanner job is triggered - a documentation [page](../../../user/project/repository/code_suggestions/repository_xray.md#enable-repository-x-ray) suggests limiting this job to be executed only when changes occur to the main repository branch. However, repository maintainers may configure trigger rules differently.
1. An X Ray [scanner](https://gitlab.com/gitlab-org/code-creation/repository-x-ray) locates and processes one of the supported [dependencies files](../../../user/project/repository/code_suggestions/repository_xray.md#supported-languages-and-package-managers), producing JSON report files
1. After the X Ray scanner job finishes successfully, a [background job](https://gitlab.com/gitlab-org/gitlab/-/blob/c6b2f18eaf0b78a4e0012e88f28d643eb0dfb1c2/ee/app/workers/ai/store_repository_xray_worker.rb#L18) is triggered in the GitLab Rails monolith that imports the JSON report into [`Projects::XrayReport`](https://gitlab.com/gitlab-org/gitlab/-/blob/bc2ad40b4b026dd359e289cf2dc232de1a2d3227/ee/app/models/projects/xray_report.rb#L22)
1. There can be only one Repository X Ray report per project in the scope of a programming language; duplicate records are upserted during the import process
As of today, there are 84 rows in the `xray_reports` table on GitLab.com.
### Retrieval
After the Repository X Ray report gets imported, when the IDE extension sends a request for a [code generation](../../../user/project/repository/code_suggestions/index.md), the Repository X Ray report is retrieved in the following steps:
1. Fetch embedding of the user input from `textembedding-gecko` model (768 dimensions).
1. Query to `vertex_gitlab_docs` table for finding the nearest neighbors. For example:
```sql
SELECT *
FROM vertex_gitlab_docs
ORDER BY vertex_gitlab_docs.embedding <=> '[vectors of user input]' -- nearest neighbors by cosine distance
LIMIT 10
```
1. The GitLab Rails monolith fetches the corresponding `xray_reports` record from the main database. `xray_reports` records are filtered based on the `project_id` foreign key and `lang` columns.
1. From the retrieved record, the first 50 dependencies are added into a prompt that is forwarded to AI Gateway
### Current state overview
```mermaid
sequenceDiagram
actor USR as User
participant IDE
participant GLR as GitLabRails
participant RN as GitLabRunner
participant PG as GitLabPsqlMainDB
participant AIGW as AIGateway
USR->>+GLR: commits changes to Gemfile.lock
GLR->>RN: triggers Repository X Ray CI scanner job
RN->>GLR: Repository X Ray report
GLR->>GLR: triggers Repository X Ray ingestion job
GLR->>-PG: upserts xray_reports record
USR->>+IDE: types: "#35; generate function that fetches sales report for vendor from App Store"
IDE->>+GLR: trigger code generation for line ` "#35; generate function `
GLR->>PG: fetch X Ray report for project and language
PG->>GLR: xray_reports record
GLR->>GLR: include first 50 entities from xray report into code generation prompt
GLR->>-AIGW: trigger code generation ` "#35; generate function `
```
### Embeddings prospect application
As described in the retrieval section above, Repository X Ray reports currently follow a very naive approach that does not include any metric for assessing relevance between the Repository X Ray report content and the user instruction. Therefore, applying embeddings and semantic search to the X Ray report has a high potential of improving results by selecting a limited set of related entries from the Repository X Ray report based on the user instruction.
To achieve that, embeddings should be generated during Repository X Ray ingestion. Additionally, a user instruction should be turned into an embeddings vector to perform semantic search over the stored Repository X Ray report data during the retrieval process.

View File

@ -0,0 +1,87 @@
---
status: proposed
creation-date: "2024-02-20"
authors: [ "@bvenker", "@mikolaj_wawrzyniak" ]
coach: [ "@stanhu" ]
approvers: [ "@pwietchner", "@oregand", "@shinya.meda", "@mikolaj_wawrzyniak" ]
owning-stage: "~devops::data stores"
participating-stages: ["~devops::ai-powered", "~devops::create"]
---
# Vertex AI Search
This page explains how to retrieve data from Google Vertex AI Search for [RAG](index.md).
## Overview
Some of our data are public resources that don't require [data access check](index.md#data-access-policy) when retrieving.
These data are often identical across GitLab instances so it's redundant to ingest the same data into every single database.
It'd be more efficient to serve the data from the single service.
We can use [Vertex AI Search](https://cloud.google.com/vertex-ai-search-and-conversation?hl=en) in this case.
It can search at scale, with high queries per second (QPS), high recall, low latency, and cost efficiency.
This approach allows us to minimize code that we can't update on a customer's behalf, which means avoiding hard-coding AI-related logic in the GitLab monolith codebase. We can retain the flexibility to make changes in our product without asking customers to upgrade their GitLab version.
This is the same as the [AI Gateway](../ai_gateway/index.md)'s design principle.
```mermaid
flowchart LR
subgraph GitLab managed
subgraph AIGateway
VertexAIClient["VertexAIClient"]
end
subgraph Vertex AI Search["Vertex AI Search"]
subgraph SearchApp1["App"]
direction LR
App1DataStore(["BigQuery"])
end
subgraph SearchApp2["App"]
direction LR
App2DataStore(["Cloud Storage / Website URLs"])
end
end
end
subgraph SM or SaaS GitLab
DuoFeatureA["Duo feature A"]
DuoFeatureB["Duo feature B"]
end
DuoFeatureA -- Semantic search --- VertexAIClient
DuoFeatureB -- Semantic search --- VertexAIClient
VertexAIClient -- Search from Gitlab Docs --- SearchApp1
VertexAIClient -- Search from other data store --- SearchApp2
```
## Limitations
- Data **must be** [GREEN level](index.md#data-access-policy) and publicly shareable.
- Examples:
    - GitLab documentation (`gitlab-org/gitlab/doc`, `gitlab-org/gitlab-runner/docs`, `gitlab-org/omnibus-gitlab/doc`, etc.)
- Dynamically construct few-shot prompt templates with [Example selectors](https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/).
**IMPORTANT: We do NOT persist customer data into Vertex AI Search. See the other solutions for persisting customer data.**
## Performance and scalability implications
- GitLab-side: Vertex AI Search can [search at scale, with high queries per second (QPS), high recall, low latency, and cost efficiency](https://cloud.google.com/vertex-ai/docs/vector-search/overview).
- GitLab-side: Vertex AI Search supports [global and multi-region deployments](https://cloud.google.com/generative-ai-app-builder/docs/locations).
- Customer-side: The outbound requests from their GitLab Self-managed instances could cause more network latency than retrieving from a local vector store.
This latency issue is addressable by multi-region deployments.
## Availability
- Customer-side: Air-gapped solutions can't be supported due to the required access to AI Gateway (`cloud.gitlab.com`).
This concern would be negligible since GitLab Duo already requires the access.
- Customer-side: Since the service is the single point of failure, retrievers stop working when the service is down.
## Cost implications
- GitLab-side: See [Vertex AI Search pricing](https://cloud.google.com/generative-ai-app-builder/pricing).
- Customer-side: No additional cost required.
## Maintenance
- GitLab-side: GitLab needs to maintain the data store (e.g. Structured data in Bigquery or unstructured data in Cloud Storage). Google automatically detects the schema and indexes the stored data.
- Customer-side: No maintenance required.

View File

@ -295,6 +295,31 @@ spec:
script: ./lint --$[[ inputs.linter ]] --path=$[[ inputs.lint-path ]]
```
### Reuse configuration in `inputs`
To reuse configuration with `inputs`, you can use [YAML anchors](yaml_optimization.md#anchors).
For example, to reuse the same `rules` configuration with multiple components that support
`rules` arrays in the inputs:
```yaml
.my-job-rules: &my-job-rules
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
include:
- component: gitlab.com/project/path/component1@main
inputs:
job-rules: *my-job-rules
- component: gitlab.com/project/path/component2@main
inputs:
job-rules: *my-job-rules
```
You cannot use [`!reference` tags](yaml_optimization.md#reference-tags) in inputs,
but [issue 424481](https://gitlab.com/gitlab-org/gitlab/-/issues/424481) proposes adding
this functionality.
## Specify functions to manipulate input values
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/409462) in GitLab 16.3.

View File

@ -110,12 +110,18 @@ The following models have been approved for use:
### Vector stores
NOTE:
There is a proposal to change vector stores for improving the quality of search results. See [RAG for GitLab Duo](../architecture/blueprints/gitlab_duo_rag/index.md) for more information.
The following vector stores have been approved for use:
- [`pgvector`](https://github.com/pgvector/pgvector) is a Postgres extension adding support for storing vector embeddings and calculating ANN (approximate nearest neighbor).
### Indexing Update
NOTE:
There is a proposal to change indexing update for improving the quality of search results. See [RAG for GitLab Duo](../architecture/blueprints/gitlab_duo_rag/index.md) for more information.
We are currently using sequential scan, which provides perfect recall. We are considering adding an index if we can ensure that it still produces accurate results, as noted in the `pgvector` indexing [documentation](https://github.com/pgvector/pgvector#indexing).
Given that the table contains thousands of entries, indexing with these updated settings would likely improve search speed while maintaining high accuracy. However, more testing may be needed to verify the optimal configuration for this dataset size before deploying to production.

View File

@ -22,7 +22,7 @@ There is a difference in the setup for Saas and self-managed instances.
We recommend to start with a process described for SaaS-only AI features.
1. [Setup SaaS-only AI features](index.md#saas-only-features).
1. [Setup self-managed AI features](index.md#local-setup).
1. [Setup self-managed AI features](index.md#set-up).
## Working with GitLab Duo Chat
@ -40,7 +40,7 @@ If you find an undocumented issue, you should document it in this section after
| Problem | Solution |
|-----------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| There is no Chat button in the GitLab UI. | Make sure your user is a part of a group with enabled experimental and beta features. |
| Chat replies with "Forbidden by auth provider" error. | Backend can't access LLMs. Make sure your [AI Gateway](index.md#local-setup) is set up correctly. |
| Chat replies with "Forbidden by auth provider" error. | Backend can't access LLMs. Make sure your [AI Gateway](index.md#set-up) is set up correctly. |
| Requests take too long to appear in UI | Consider restarting Sidekiq by running `gdk restart rails-background-jobs`. If that doesn't work, try `gdk kill` and then `gdk start`. Alternatively, you can bypass Sidekiq entirely. To do that temporary alter `Llm::CompletionWorker.perform_async` statements with `Llm::CompletionWorker.perform_inline` |
| There is no chat button in GitLab UI when GDK is running on non-SaaS mode | You do not have cloud connector access token record or seat assigned. To create cloud connector access record, in rails console put following code: `CloudConnector::Access.new(data: { available_services: [{ name: "duo_chat", serviceStartTime: ":date_in_the_future" }] }).save`. |

View File

@ -41,7 +41,91 @@ Gitlab::CurrentSettings.update(vertex_ai_project: "PROJECT_ID")
Gitlab::CurrentSettings.update!(anthropic_api_key: "<insert API key>")
```
### Local setup
### Embeddings database
NOTE:
There is a proposal to change embeddings database for improving the quality of search results. See [RAG for GitLab Duo](../../architecture/blueprints/gitlab_duo_rag/index.md) for more information.
Embeddings are generated through the [VertexAI text embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings). The sections
below explain how to populate embeddings in the DB or extract embeddings to be
used in specs.
#### Set up
1. Enable [`pgvector`](https://gitlab.com/gitlab-org/gitlab-development-kit/-/blob/main/doc/howto/pgvector.md#enable-pgvector-in-the-gdk) in GDK
1. Enable the embedding database in GDK
```shell
gdk config set gitlab.rails.databases.embedding.enabled true
```
1. Run `gdk reconfigure`
1. Run database migrations to create the embedding database
```shell
RAILS_ENV=development bin/rails db:migrate
```
#### Populate
Seed your development database with the embeddings for GitLab Documentation
using this Rake task:
```shell
RAILS_ENV=development bundle exec rake gitlab:llm:embeddings:vertex:seed
```
This Rake Task populates the embeddings database with a vectorized
representation of all GitLab Documentation. The file the Rake Task uses as a
source is a snapshot of GitLab Documentation at some point in the past and is
not updated regularly. As a result, it is helpful to know that this seed task
creates embeddings based on GitLab Documentation that is out of date. Slightly
outdated documentation embeddings are sufficient for the development
environment, which is the use-case for the seed task.
When writing or updating tests related to embeddings, you may want to update the
embeddings fixture file:
```shell
RAILS_ENV=development bundle exec rake gitlab:llm:embeddings:vertex:extract_embeddings
```
#### Use embeddings in specs
The `seed` Rake Task populates the development database with embeddings for all GitLab
Documentation. The `extract_embeddings` Rake Task populates a fixture file with a subset
of embeddings.
The set of questions listed in the Rake Task itself determines
which embeddings are pulled into the fixture file. For example, one of the
questions is "How can I reset my password?" The `extract_embeddings` Task
pulls the most relevant embeddings for this question from the development
database (which has data from the `seed` Rake Task) and saves those embeddings
in `ee/spec/fixtures/vertex_embeddings`. This fixture is used in tests related
to embeddings.
If you would like to change any of the questions supported in embeddings specs,
update and re-run the `extract_embeddings` Rake Task.
In the specs where you need to use the embeddings,
use the RSpec `:ai_embedding_fixtures` metadata.
```ruby
context 'when asking about how to use GitLab', :ai_embedding_fixtures do
# ...examples
end
```
### Tips for local development
1. When responses are taking too long to appear in the user interface, consider restarting Sidekiq by running `gdk restart rails-background-jobs`. If that doesn't work, try `gdk kill` and then `gdk start`.
1. Alternatively, bypass Sidekiq entirely and run the chat service synchronously. This can help with debugging errors as GraphQL errors are now available in the network inspector instead of the Sidekiq logs. To do that temporary alter `Llm::CompletionWorker.perform_async` statements with `Llm::CompletionWorker.perform_inline`
### Working with GitLab Duo Chat
View [guidelines](duo_chat.md) for working with GitLab Duo Chat.
## Test AI features with AI Gateway locally
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/11251) in GitLab 16.8.
@ -79,15 +163,7 @@ the feature must request to the [AI Gateway](../../architecture/blueprints/ai_ga
Gitlab::Llm::AiGateway::Client.new(User.first).stream(prompt: "\n\nHuman: Hi, how are you?\n\nAssistant:")
```
**Additional setup for testing subscriptions** (***not required for DuoChat setup***)
1. Setup CustomersDot:
1. Install CustomersDot: [internal video tutorial](https://youtu.be/_8wOMa_yGSw) (replace inactive subscription plan ID URL provided in the video caption with an active one from the link containing plan ids below)
- This video loosely follows [official installation steps](https://gitlab.com/gitlab-org/customers-gitlab-com/-/blob/main/doc/setup/installation_steps.md)
- It also offers guidance on how to create a self-managed subscription. You will receive a *cloud activation code* in return.
- A list of subscription plan ids are available [here](https://gitlab.com/gitlab-org/customers-gitlab-com/-/blob/main/doc/flows/buy_subscription.md) for creating a Self-Managed Subscription locally.
#### Verify the setup with GraphQL
### Verify the setup with GraphQL
1. Visit [GraphQL explorer](../../api/graphql/index.md#interactive-graphql-explorer).
1. Execute the `aiAction` mutation. Here is an example:
@ -142,7 +218,7 @@ it will print useful error messages with links to the docs on how to resolve the
GITLAB_SIMULATE_SAAS=1 RAILS_ENV=development bundle exec rake 'gitlab:duo:setup[<test-group-name>]'
```
[AI Gateway](#local-setup) still needs to be setup when using the automated setup.
[AI Gateway](#set-up) still needs to be setup when using the automated setup.
**Manual way**
@ -165,7 +241,7 @@ GITLAB_SIMULATE_SAAS=1 RAILS_ENV=development bundle exec rake 'gitlab:duo:setup[
1. Enable **Experiment & Beta features**.
1. Enable the specific feature flag for the feature you want to test.
1. You can use Rake task `rake gitlab:duo:enable_feature_flags` to enable all feature flags that are assigned to group AI Framework.
1. Setup [AI Gateway](#local-setup).
1. Setup [AI Gateway](#set-up).
### Help

View File

@ -56,6 +56,7 @@ Refer to [CustomersDot](#customersdot) to see how custom claims are handled for
- `X-Gitlab-Instance-Id`: A globally unique instance ID string.
- `X-Gitlab-Global-User-Id`: A globally unique anonymous user ID string.
- `X-Gitlab-Realm`: One of `saas`, `self-managed`.
- `X-Gitlab-Version`: Version of the GitLab instance.
- `Authorization`: Contains the Base64-encoded JWT as a `Bearer` token obtained from the `access_token` method in step 1.
Some of these headers can be injected by merging the result of the `API::Helpers::CloudConnector#cloud_connector_headers`
@ -280,4 +281,4 @@ and assign it to the Cloud Connector group.
## Testing
An example for how to set up an end-to-end integration with the AI gateway as the backend service can be found [here](../ai_features/index.md#local-setup).
An example for how to set up an end-to-end integration with the AI gateway as the backend service can be found [here](../ai_features/index.md#set-up).

View File

@ -292,7 +292,7 @@ If the existing sections do not meet your requirements for UI customization, you
module Integrations
class FooBar < Integration
SECTION_TYPE_SUPER = :my_custom_section
def sections
[
{
@ -353,7 +353,12 @@ end
To expose the integration in the [REST API](../../api/integrations.md):
1. Add the integration's class (`::Integrations::FooBar`) to `API::Helpers::IntegrationsHelpers.integration_classes`.
1. Add all properties that should be exposed to `API::Helpers::IntegrationsHelpers.integrations`.
1. Add the integration's API arguments to `API::Helpers::IntegrationsHelpers.integrations`, for example:
```ruby
'foo-bar' => ::Integrations::FooBar.api_arguments
```
1. Update the reference documentation in `doc/api/integrations.md`, add a new section for your integration, and document all properties.
You can also refer to our [REST API style guide](../api_styleguide.md).

View File

@ -54,6 +54,7 @@ such as the value of a setting or the count of rows in a database table.
## Instrumentation
- To create an instrumentation plan, use this [template](https://gitlab.com/gitlab-org/gitlab/-/issues/new?issuable_template=Usage+Data+Instrumentation).
- To instrument an event-based metric, see the [internal event tracking quick start guide](internal_event_instrumentation/quick_start.md).
- To instrument a metric that observes the GitLab instances state, see [the metrics instrumentation](metrics/metrics_instrumentation.md).

View File

@ -383,11 +383,7 @@ expires_at_date = "2024-05-22"
PersonalAccessToken.project_access_token.where(expires_at: expires_at_date).find_each do |token|
token.user.members.each do |member|
type = if member.is_a?(GroupMember)
'Group'
elsif member.is_a?(ProjectMember)
'Project'
end
type = member.is_a?(GroupMember) ? 'Group' : 'Project'
puts "Expired #{type} access token in #{type} ID #{member.source_id}, Token ID: #{token.id}, Name: #{token.name}, Scopes: #{token.scopes}, Last used: #{token.last_used_at}"
end
@ -411,14 +407,14 @@ the exact date your instance was upgraded to GitLab 16.0. To use it:
1. In your terminal window, connect to your instance.
1. Copy this entire script, and save it as a file on your instance:
- Name it `expired_tokens.rb`.
- Name it `expired_tokens_date_range.rb`.
- If desired, change the `date_range` to a different range.
- The file must be accessible to `git:git`.
1. Run this command, changing `/path/to/expired_tokens.rb`
to the _full_ path to your `expired_tokens.rb` file:
1. Run this command, changing `/path/to/expired_tokens_date_range.rb`
to the _full_ path to your `expired_tokens_date_range.rb` file:
```shell
sudo gitlab-rails runner /path/to/expired_tokens.rb
sudo gitlab-rails runner /path/to/expired_tokens_date_range.rb
```
For more information, see the [Rails Runner troubleshooting section](../administration/operations/rails_console.md#troubleshooting).

View File

@ -14,3 +14,5 @@ To instrument an application to send events to GitLab product analytics you can
- [Node SDK](https://gitlab.com/gitlab-org/analytics-section/product-analytics/gl-application-sdk-node)
- [.NET SDK](https://gitlab.com/gitlab-org/analytics-section/product-analytics/gl-application-sdk-dotnet)
- [Snowplow SDK](https://docs.snowplow.io/docs/collecting-data/collecting-from-own-applications/) is compatible, but not supported by GitLab.
If you are interested in other SDKs to be supported, please comment in [issue 391970](https://gitlab.com/gitlab-org/gitlab/-/issues/391970).

View File

@ -135,6 +135,17 @@ Prerequisites:
Use the groups API to [rotate the personal access token](../../api/groups.md#rotate-a-personal-access-token-for-service-account-user) for a service account user.
### Revoke a personal access token
Prerequisites:
- You must be signed in as the service account user.
To revoke a personal access token, use the [personal access tokens API](../../api/personal_access_tokens.md#revoke-a-personal-access-token). You can use either of the following methods:
- Use a [personal access token ID](../../api/personal_access_tokens.md#using-a-personal-access-token-id-1). The token used to perform the revocation must have the [`admin_mode`](personal_access_tokens.md#personal-access-token-scopes) scope.
- Use a [request header](../../api/personal_access_tokens.md#using-a-request-header-1). The token used to perform the request is revoked.
### Delete a service account
Prerequisites:
@ -159,8 +170,6 @@ If you are not an administrator for the instance or group a service account is a
For more information, see the [API documentation on removing a member from a group or project](../../api/members.md#remove-a-member-from-a-group-or-project).
1. Revoke the personal access token by using the [API](../../api/personal_access_tokens.md#revoke-a-personal-access-token).
## Related topics
- [Billable users](../../subscriptions/self_managed/index.md#billable-users)

View File

@ -1551,7 +1551,7 @@
</p></pre>
</td></tr></table>
wysiwyg: |-
<table><tbody><tr><td colspan="1" rowspan="1"><pre><p dir="auto">**Hello**,
<table style="minWidth: 25px"><colgroup><col></colgroup><tbody><tr><td colspan="1" rowspan="1"><pre><p dir="auto">**Hello**,
</p><p dir="auto"><em>world</em>.
</p></pre><p dir="auto"></p></td></tr></tbody></table>
04_06_00__leaf_blocks__html_blocks__002:
@ -1574,7 +1574,7 @@
</table>
<p data-sourcepos="9:1-9:5" dir="auto">okay.</p>
wysiwyg: |-
<table><tbody><tr><td colspan="1" rowspan="1"><p dir="auto">
<table style="minWidth: 25px"><colgroup><col></colgroup><tbody><tr><td colspan="1" rowspan="1"><p dir="auto">
hi
</p></td></tr></tbody></table>
<p dir="auto">okay.</p>
@ -1688,7 +1688,7 @@
foo
</td></tr></table>
wysiwyg: |-
<table><tbody><tr><td colspan="1" rowspan="1"><p dir="auto">
<table style="minWidth: 25px"><colgroup><col></colgroup><tbody><tr><td colspan="1" rowspan="1"><p dir="auto">
foo
</p></td></tr></tbody></table>
04_06_00__leaf_blocks__html_blocks__014:
@ -2093,7 +2093,7 @@
</tr>
</table>
wysiwyg: |-
<table><tbody><tr><td colspan="1" rowspan="1"><p dir="auto">
<table style="minWidth: 25px"><colgroup><col></colgroup><tbody><tr><td colspan="1" rowspan="1"><p dir="auto">
Hi
</p></td></tr></tbody></table>
04_06_00__leaf_blocks__html_blocks__043:
@ -2121,7 +2121,7 @@
<pre dir="auto" class="content-editor-code-block undefined code highlight"><code>&lt;td&gt;
Hi
&lt;/td&gt;</code></pre>
<table><tbody><tr></tr></tbody></table>
<table style="width: 0px"><colgroup></colgroup><tbody><tr></tr></tbody></table>
04_07_00__leaf_blocks__link_reference_definitions__001:
canonical: |
<p><a href="/url" title="title">foo</a></p>
@ -2546,7 +2546,7 @@
</tbody>
</table>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">foo</p></th><th colspan="1" rowspan="1"><p dir="auto">bar</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">baz</p></td><td colspan="1" rowspan="1"><p dir="auto">bim</p></td></tr></tbody></table>
<table style="minWidth: 50px"><colgroup><col><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">foo</p></th><th colspan="1" rowspan="1"><p dir="auto">bar</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">baz</p></td><td colspan="1" rowspan="1"><p dir="auto">bim</p></td></tr></tbody></table>
04_10_00__leaf_blocks__tables_extension__002:
canonical: |
<table>
@ -2579,7 +2579,7 @@
</tbody>
</table>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">defghi</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto">baz</p></td></tr></tbody></table>
<table style="minWidth: 50px"><colgroup><col><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">defghi</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto">baz</p></td></tr></tbody></table>
04_10_00__leaf_blocks__tables_extension__003:
canonical: |
<table>
@ -2614,7 +2614,7 @@
</tbody>
</table>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">f|oo</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">b <code>|</code> az</p></td></tr><tr><td colspan="1" rowspan="1"><p dir="auto">b <strong>|</strong> im</p></td></tr></tbody></table>
<table style="minWidth: 25px"><colgroup><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">f|oo</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">b <code>|</code> az</p></td></tr><tr><td colspan="1" rowspan="1"><p dir="auto">b <strong>|</strong> im</p></td></tr></tbody></table>
04_10_00__leaf_blocks__tables_extension__004:
canonical: |
<table>
@ -2653,7 +2653,7 @@
<p data-sourcepos="4:3-4:5">bar</p>
</blockquote>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">def</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto">baz</p></td></tr></tbody></table>
<table style="minWidth: 50px"><colgroup><col><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">def</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto">baz</p></td></tr></tbody></table>
<blockquote dir="auto" multiline="false"><p dir="auto">bar</p></blockquote>
04_10_00__leaf_blocks__tables_extension__005:
canonical: |
@ -2697,7 +2697,7 @@
</table>
<p data-sourcepos="6:1-6:3" dir="auto">bar</p>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">def</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto">baz</p></td></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto"></p></td></tr></tbody></table>
<table style="minWidth: 50px"><colgroup><col><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">def</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto">baz</p></td></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto"></p></td></tr></tbody></table>
<p dir="auto">bar</p>
04_10_00__leaf_blocks__tables_extension__006:
canonical: |
@ -2752,7 +2752,7 @@
</tbody>
</table>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">def</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto"></p></td></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto">baz</p></td></tr></tbody></table>
<table style="minWidth: 50px"><colgroup><col><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">def</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto"></p></td></tr><tr><td colspan="1" rowspan="1"><p dir="auto">bar</p></td><td colspan="1" rowspan="1"><p dir="auto">baz</p></td></tr></tbody></table>
04_10_00__leaf_blocks__tables_extension__008:
canonical: |
<table>
@ -2773,7 +2773,7 @@
</thead>
</table>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">def</p></th></tr></tbody></table>
<table style="minWidth: 50px"><colgroup><col><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">abc</p></th><th colspan="1" rowspan="1"><p dir="auto">def</p></th></tr></tbody></table>
05_01_00__container_blocks__block_quotes__001:
canonical: |
<blockquote>
@ -8519,7 +8519,7 @@
<h1 data-sourcepos="6:1-6:21" dir="auto">
<a href="#content-after-table" aria-hidden="true" class="anchor" id="user-content-content-after-table"></a>content after table</h1>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">header</p></th><th colspan="1" rowspan="1"><p dir="auto">header</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto"><code>code</code></p></td><td colspan="1" rowspan="1"><p dir="auto">cell with <strong>bold</strong></p></td></tr><tr><td colspan="1" rowspan="1"><p dir="auto"><s>strike</s></p></td><td colspan="1" rowspan="1"><p dir="auto">cell with <em>italic</em></p></td></tr></tbody></table>
<table style="minWidth: 50px"><colgroup><col><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">header</p></th><th colspan="1" rowspan="1"><p dir="auto">header</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto"><code>code</code></p></td><td colspan="1" rowspan="1"><p dir="auto">cell with <strong>bold</strong></p></td></tr><tr><td colspan="1" rowspan="1"><p dir="auto"><s>strike</s></p></td><td colspan="1" rowspan="1"><p dir="auto">cell with <em>italic</em></p></td></tr></tbody></table>
<h1 dir="auto">content after table</h1>
08_04_49__gitlab_internal_extension_markdown__migrated_golden_master_examples__table_of_contents__001:
canonical: |
@ -8813,7 +8813,7 @@
</tbody>
</table>
wysiwyg: |-
<table><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">a</p></th><th colspan="1" rowspan="1"><p dir="auto">b</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto"><a target="_blank" rel="noopener noreferrer nofollow" href="https://github.com">https://github.com</a><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.github.com">www.github.com</a></p></td><td colspan="1" rowspan="1"><p dir="auto"><a target="_blank" rel="noopener noreferrer nofollow" href="http://pokemon.com">http://pokemon.com</a></p></td></tr></tbody></table>
<table style="minWidth: 50px"><colgroup><col><col></colgroup><tbody><tr><th colspan="1" rowspan="1"><p dir="auto">a</p></th><th colspan="1" rowspan="1"><p dir="auto">b</p></th></tr><tr><td colspan="1" rowspan="1"><p dir="auto"><a target="_blank" rel="noopener noreferrer nofollow" href="https://github.com">https://github.com</a><a target="_blank" rel="noopener noreferrer nofollow" href="http://www.github.com">www.github.com</a></p></td><td colspan="1" rowspan="1"><p dir="auto"><a target="_blank" rel="noopener noreferrer nofollow" href="http://pokemon.com">http://pokemon.com</a></p></td></tr></tbody></table>
09_04_00__gfm_undocumented_extensions_and_more_robust_test__task_lists__001:
canonical: |
<ul>

View File

@ -55,12 +55,7 @@ module API
builds = user_project.builds.order(id: :desc)
builds = filter_builds(builds, params[:scope])
builds = builds.preload(
:job_artifacts_archive, :job_artifacts, :runner, :tags, :runner_manager, :metadata,
pipeline: :project,
user: [:user_preference, :user_detail, :followees]
)
builds = builds.eager_load_for_api
present paginate_with_strategies(builds, user_project, paginator_params: { without_count: true }), with: Entities::Ci::Job
end

View File

@ -41,87 +41,78 @@ module API
end
end
expose :packages_enabled, documentation: { type: 'boolean' }
expose :empty_repo?, as: :empty_repo, documentation: { type: 'boolean' }
expose :archived?, as: :archived, documentation: { type: 'boolean' }
expose :visibility, documentation: { type: 'string', example: 'public' }
expose :owner, using: Entities::UserBasic, unless: ->(project, options) { project.group }
expose :resolve_outdated_diff_discussions, documentation: { type: 'boolean' }
expose :container_expiration_policy,
using: Entities::ContainerExpirationPolicy,
if: ->(project, _) { project.container_expiration_policy }
expose :repository_object_format, documentation: { type: 'string', example: 'sha1' }
# Expose old field names with the new permissions methods to keep API compatible
# TODO: remove in API v5, replaced by *_access_level
expose(:issues_enabled, documentation: { type: 'boolean' }) { |project, options| project.feature_available?(:issues, options[:current_user]) }
expose(:merge_requests_enabled, documentation: { type: 'boolean' }) { |project, options| project.feature_available?(:merge_requests, options[:current_user]) }
expose(:wiki_enabled, documentation: { type: 'boolean' }) { |project, options| project.feature_available?(:wiki, options[:current_user]) }
expose(:jobs_enabled, documentation: { type: 'boolean' }) { |project, options| project.feature_available?(:builds, options[:current_user]) }
expose(:snippets_enabled, documentation: { type: 'boolean' }) { |project, options| project.feature_available?(:snippets, options[:current_user]) }
expose(:container_registry_enabled, documentation: { type: 'boolean' }) { |project, options| project.feature_available?(:container_registry, options[:current_user]) }
expose :service_desk_enabled, documentation: { type: 'boolean' }
expose :service_desk_address, documentation: { type: 'string', example: 'address@example.com' }, if: ->(project, options) do
Ability.allowed?(options[:current_user], :admin_issue, project)
end
expose :open_issues_count, documentation: { type: 'integer', example: 1 }, if: lambda { |project, options| project.feature_available?(:issues, options[:current_user]) }
expose :description_html, documentation: { type: 'string' }
expose :updated_at, documentation: { type: 'dateTime', example: '2020-05-07T04:27:17.016Z' }
expose(:can_create_merge_request_in, documentation: { type: 'boolean' }) do |project, options|
Ability.allowed?(options[:current_user], :create_merge_request_in, project)
end
expose(:issues_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :issues) }
expose(:repository_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :repository) }
expose(:merge_requests_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :merge_requests) }
expose(:forking_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :forking) }
expose(:wiki_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :wiki) }
expose(:builds_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :builds) }
expose(:snippets_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :snippets) }
expose(:pages_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :pages) }
expose(:analytics_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :analytics) }
expose(:container_registry_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :container_registry) }
expose(:security_and_compliance_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :security_and_compliance) }
expose(:releases_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :releases) }
expose(:environments_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :environments) }
expose(:feature_flags_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :feature_flags) }
expose(:infrastructure_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :infrastructure) }
expose(:monitor_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :monitor) }
expose(:model_experiments_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :model_experiments) }
expose(:model_registry_access_level, documentation: { type: 'string', example: 'enabled' }) { |project, options| project_feature_string_access_level(project, :model_registry) }
expose(:emails_disabled, documentation: { type: 'boolean' }) { |project, options| project.emails_disabled? }
expose :emails_enabled, documentation: { type: 'boolean' }
expose :shared_runners_enabled, documentation: { type: 'boolean' }
expose :lfs_enabled?, as: :lfs_enabled, documentation: { type: 'boolean' }
expose :creator_id, documentation: { type: 'integer', example: 1 }
expose :forked_from_project, using: Entities::BasicProjectDetails, if: ->(project, options) do
project.forked? && Ability.allowed?(options[:current_user], :read_project, project.forked_from_project)
end
expose :statistics, using: 'API::Entities::ProjectStatistics', if: ->(project, options) {
options[:statistics] && Ability.allowed?(options[:current_user], :read_statistics, project)
}
expose :ci_config_path, documentation: { type: 'string', example: '' }, if: ->(project, options) {
Ability.allowed?(options[:current_user], :read_code, project)
}
expose :mr_default_target_self, if: ->(project) { project.forked? }, documentation: { type: 'boolean' }
expose :import_url, documentation: { type: 'string', example: 'https://gitlab.com/gitlab/gitlab.git' }, if: ->(project, options) { Ability.allowed?(options[:current_user], :admin_project, project) } do |project|
project[:import_url]
end
expose :import_type, documentation: { type: 'string', example: 'git' }, if: ->(project, options) { Ability.allowed?(options[:current_user], :admin_project, project) }
expose :import_status, documentation: { type: 'string', example: 'none' }
expose :import_error, documentation: { type: 'string', example: 'Import error' }, if: lambda { |_project, options| options[:user_can_admin_project] } do |project|
project.import_state&.last_error
end
expose :open_issues_count, documentation: { type: 'integer', example: 1 }, if: lambda { |project, options| project.feature_available?(:issues, options[:current_user]) }
expose :description_html, documentation: { type: 'string' }
expose :updated_at, documentation: { type: 'dateTime', example: '2020-05-07T04:27:17.016Z' }
expose :shared_with_groups, documentation: { is_array: true } do |project, options|
user = options[:current_user]
SharedGroupWithProject.represent(project.visible_group_links(for_user: user), options)
end
expose :service_desk_address, documentation: { type: 'string', example: 'address@example.com' }, if: ->(project, options) do
Ability.allowed?(options[:current_user], :admin_issue, project)
end
with_options if: ->(_, _) { user_can_admin_project? } do
expose :emails_disabled?, as: :emails_disabled, documentation: { type: 'boolean' }
expose :emails_enabled, documentation: { type: 'boolean' }
expose :resolve_outdated_diff_discussions, documentation: { type: 'boolean' }
expose :container_expiration_policy,
using: Entities::ContainerExpirationPolicy,
if: ->(project, _) { project.container_expiration_policy }
expose :repository_object_format, documentation: { type: 'string', example: 'sha1' }
expose :shared_runners_enabled, documentation: { type: 'boolean' }
expose :lfs_enabled?, as: :lfs_enabled, documentation: { type: 'boolean' }
expose :creator_id, documentation: { type: 'integer', example: 1 }
expose :import_url, documentation: { type: 'string', example: 'https://gitlab.com/gitlab/gitlab.git' } do |project|
project[:import_url]
end
expose :import_type, documentation: { type: 'string', example: 'git' }
expose :import_status, documentation: { type: 'string', example: 'none' }
expose :import_error, documentation: { type: 'string', example: 'Import error' } do |project|
project.import_state&.last_error
end
with_options if: ->(_, _) { Ability.allowed?(options[:current_user], :admin_project, project) } do
# CI/CD Settings
expose :ci_default_git_depth, documentation: { type: 'integer', example: 20 }
expose :ci_forward_deployment_enabled, documentation: { type: 'boolean' }
expose :ci_forward_deployment_rollback_allowed, documentation: { type: 'boolean' }
expose(:ci_job_token_scope_enabled, documentation: { type: 'boolean' }) { |p, _| p.ci_outbound_job_token_scope_enabled? }
expose :ci_outbound_job_token_scope_enabled?, as: :ci_job_token_scope_enabled, documentation: { type: 'boolean' }
expose :ci_separated_caches, documentation: { type: 'boolean' }
expose :ci_allow_fork_pipelines_to_run_in_parent_project, documentation: { type: 'boolean' }
expose :build_git_strategy, documentation: { type: 'string', example: 'fetch' } do |project, options|
expose :build_git_strategy, documentation: { type: 'string', example: 'fetch' } do |project|
project.build_allow_git_fetch ? 'fetch' : 'clone'
end
expose :keep_latest_artifacts_available?, as: :keep_latest_artifact, documentation: { type: 'boolean' }
expose :restrict_user_defined_variables, documentation: { type: 'boolean' }
expose :runners_token, documentation: { type: 'string', example: 'b8547b1dc37721d05889db52fa2f02' }
@ -130,40 +121,133 @@ module API
expose :auto_cancel_pending_pipelines, documentation: { type: 'string', example: 'enabled' }
expose :build_timeout, documentation: { type: 'integer', example: 3600 }
expose :auto_devops_enabled?, as: :auto_devops_enabled, documentation: { type: 'boolean' }
expose :auto_devops_deploy_strategy, documentation: { type: 'string', example: 'continuous' } do |project, options|
expose :auto_devops_deploy_strategy, documentation: { type: 'string', example: 'continuous' } do |project|
project.auto_devops.nil? ? 'continuous' : project.auto_devops.deploy_strategy
end
expose :public_builds, as: :public_jobs, documentation: { type: 'boolean' }
expose :only_allow_merge_if_pipeline_succeeds, documentation: { type: 'boolean' }
expose :allow_merge_on_skipped_pipeline, documentation: { type: 'boolean' }
expose :request_access_enabled, documentation: { type: 'boolean' }
expose :only_allow_merge_if_all_discussions_are_resolved, documentation: { type: 'boolean' }
expose :remove_source_branch_after_merge, documentation: { type: 'boolean' }
expose :printing_merge_request_link_enabled, documentation: { type: 'boolean' }
expose :merge_method, documentation: { type: 'string', example: 'merge' }
expose :squash_option, documentation: { type: 'string', example: 'default_off' }
expose :enforce_auth_checks_on_uploads, documentation: { type: 'boolean' }
expose :suggestion_commit_message, documentation: { type: 'string', example: 'Suggestion message' }
expose :merge_commit_template, documentation: { type: 'string', example: '%(title)' }
expose :squash_commit_template, documentation: { type: 'string', example: '%(source_branch)' }
expose :issue_branch_template, documentation: { type: 'string', example: '%(title)' }
expose :warn_about_potentially_unwanted_characters, documentation: { type: 'boolean' }
expose :autoclose_referenced_issues, documentation: { type: 'boolean' }
# Expose old field names with the new permissions methods to keep API compatible
# TODO: remove in API v5, replaced by *_access_level
expose :packages_enabled, documentation: { type: 'boolean' }
expose :service_desk_enabled, documentation: { type: 'boolean' }
expose :issues_enabled, documentation: { type: 'boolean' } do |project, options|
project.feature_available?(:issues, options[:current_user])
end
expose :merge_requests_enabled, documentation: { type: 'boolean' } do |project, options|
project.feature_available?(:merge_requests, options[:current_user])
end
expose :wiki_enabled, documentation: { type: 'boolean' } do |project, options|
project.feature_available?(:wiki, options[:current_user])
end
expose :jobs_enabled, documentation: { type: 'boolean' } do |project, options|
project.feature_available?(:builds, options[:current_user])
end
expose :snippets_enabled, documentation: { type: 'boolean' } do |project, options|
project.feature_available?(:snippets, options[:current_user])
end
expose :container_registry_enabled, documentation: { type: 'boolean' } do |project, options|
project.feature_available?(:container_registry, options[:current_user])
end
# Visibility, project features, permissions settings
expose :issues_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :issues)
end
expose :repository_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :repository)
end
expose :merge_requests_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :merge_requests)
end
expose :forking_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :forking)
end
expose :wiki_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :wiki)
end
expose :builds_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :builds)
end
expose :snippets_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :snippets)
end
expose :pages_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :pages)
end
expose :analytics_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :analytics)
end
expose :container_registry_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :container_registry)
end
expose :security_and_compliance_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :security_and_compliance)
end
expose :releases_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :releases)
end
expose :environments_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :environments)
end
expose :feature_flags_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :feature_flags)
end
expose :infrastructure_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :infrastructure)
end
expose :monitor_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :monitor)
end
expose :model_experiments_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :model_experiments)
end
expose :model_registry_access_level, documentation: { type: 'string', example: 'enabled' } do |project|
project_feature_string_access_level(project, :model_registry)
end
end
expose :ci_config_path, documentation: { type: 'string', example: '' }, if: ->(project, options) { Ability.allowed?(options[:current_user], :read_code, project) }
expose :public_builds, as: :public_jobs, documentation: { type: 'boolean' }
expose :shared_with_groups, documentation: { is_array: true } do |project, options|
user = options[:current_user]
SharedGroupWithProject.represent(project.visible_group_links(for_user: user), options)
end
expose :only_allow_merge_if_pipeline_succeeds, documentation: { type: 'boolean' }
expose :allow_merge_on_skipped_pipeline, documentation: { type: 'boolean' }
expose :request_access_enabled, documentation: { type: 'boolean' }
expose :only_allow_merge_if_all_discussions_are_resolved, documentation: { type: 'boolean' }
expose :remove_source_branch_after_merge, documentation: { type: 'boolean' }
expose :printing_merge_request_link_enabled, documentation: { type: 'boolean' }
expose :merge_method, documentation: { type: 'string', example: 'merge' }
expose :squash_option, documentation: { type: 'string', example: 'default_off' }
expose :enforce_auth_checks_on_uploads, documentation: { type: 'boolean' }
expose :suggestion_commit_message, documentation: { type: 'string', example: 'Suggestion message' }
expose :merge_commit_template, documentation: { type: 'string', example: '%(title)' }
expose :squash_commit_template, documentation: { type: 'string', example: '%(source_branch)' }
expose :issue_branch_template, documentation: { type: 'string', example: '%(title)' }
expose :statistics, using: 'API::Entities::ProjectStatistics', if: ->(project, options) {
options[:statistics] && Ability.allowed?(options[:current_user], :read_statistics, project)
}
expose :warn_about_potentially_unwanted_characters, documentation: { type: 'boolean' }
expose :autoclose_referenced_issues, documentation: { type: 'boolean' }
# rubocop: disable CodeReuse/ActiveRecord
def self.preload_resource(project)
ActiveRecord::Associations::Preloader.new(records: [project], associations: { project_group_links: { group: :route } }).call
@ -200,6 +284,10 @@ module API
def self.repositories_for_preload(projects_relation)
super + projects_relation.map(&:forked_from_project).compact.map(&:repository)
end
def user_can_admin_project?
Ability.allowed?(options[:current_user], :admin_project, project)
end
end
end
end

View File

@ -97,11 +97,11 @@ module API
def self.integrations
{
'apple-app-store' => ::Integrations::AppleAppStore.api_fields,
'asana' => ::Integrations::Asana.api_fields,
'assembla' => ::Integrations::Assembla.api_fields,
'bamboo' => ::Integrations::Bamboo.api_fields,
'bugzilla' => ::Integrations::Bugzilla.api_fields,
'apple-app-store' => ::Integrations::AppleAppStore.api_arguments,
'asana' => ::Integrations::Asana.api_arguments,
'assembla' => ::Integrations::Assembla.api_arguments,
'bamboo' => ::Integrations::Bamboo.api_arguments,
'bugzilla' => ::Integrations::Bugzilla.api_arguments,
'buildkite' => [
{
required: true,
@ -122,9 +122,9 @@ module API
desc: 'DEPRECATED: This parameter has no effect since SSL verification will always be enabled'
}
],
'campfire' => ::Integrations::Campfire.api_fields,
'confluence' => ::Integrations::Confluence.api_fields,
'custom-issue-tracker' => ::Integrations::CustomIssueTracker.api_fields,
'campfire' => ::Integrations::Campfire.api_arguments,
'confluence' => ::Integrations::Confluence.api_arguments,
'custom-issue-tracker' => ::Integrations::CustomIssueTracker.api_arguments,
'datadog' => [
{
required: true,
@ -169,9 +169,9 @@ module API
desc: 'Custom tags in Datadog. Specify one tag per line in the format: "key:value\nkey2:value2"'
}
],
'diffblue-cover' => ::Integrations::DiffblueCover.api_fields,
'diffblue-cover' => ::Integrations::DiffblueCover.api_arguments,
'discord' => [
::Integrations::Discord.api_fields,
::Integrations::Discord.api_arguments,
chat_notification_flags,
chat_notification_channels
].flatten,
@ -221,12 +221,12 @@ module API
desc: 'Branches for which notifications are to be sent'
}
],
'external-wiki' => ::Integrations::ExternalWiki.api_fields,
'external-wiki' => ::Integrations::ExternalWiki.api_arguments,
'gitlab-slack-application' => [
::Integrations::GitlabSlackApplication.api_fields,
::Integrations::GitlabSlackApplication.api_arguments,
chat_notification_channels
].flatten,
'google-play' => ::Integrations::GooglePlay.api_fields,
'google-play' => ::Integrations::GooglePlay.api_arguments,
'hangouts-chat' => [
{
required: true,
@ -241,7 +241,7 @@ module API
desc: 'Branches for which notifications are to be sent'
}
].flatten,
'harbor' => ::Integrations::Harbor.api_fields,
'harbor' => ::Integrations::Harbor.api_arguments,
'irker' => [
{
required: true,
@ -380,7 +380,7 @@ module API
desc: 'Enable comments inside Jira issues on each GitLab event (commit / merge request)'
}
],
'mattermost-slash-commands' => ::Integrations::MattermostSlashCommands.api_fields,
'mattermost-slash-commands' => ::Integrations::MattermostSlashCommands.api_arguments,
'slack-slash-commands' => [
{
required: true,
@ -409,7 +409,7 @@ module API
desc: 'The server'
}
],
'phorge' => ::Integrations::Phorge.api_fields,
'phorge' => ::Integrations::Phorge.api_arguments,
'pipelines-email' => [
{
required: true,
@ -516,12 +516,12 @@ module API
desc: 'The sound of the notification'
}
],
'redmine' => ::Integrations::Redmine.api_fields,
'ewm' => ::Integrations::Ewm.api_fields,
'youtrack' => ::Integrations::Youtrack.api_fields,
'clickup' => ::Integrations::Clickup.api_fields,
'redmine' => ::Integrations::Redmine.api_arguments,
'ewm' => ::Integrations::Ewm.api_arguments,
'youtrack' => ::Integrations::Youtrack.api_arguments,
'clickup' => ::Integrations::Clickup.api_arguments,
'slack' => [
::Integrations::Slack.api_fields,
::Integrations::Slack.api_arguments,
chat_notification_channels
].flatten,
'microsoft-teams' => [
@ -540,7 +540,7 @@ module API
chat_notification_flags
].flatten,
'mattermost' => [
::Integrations::Mattermost.api_fields,
::Integrations::Mattermost.api_arguments,
chat_notification_channels
].flatten,
'teamcity' => [
@ -616,7 +616,7 @@ module API
desc: 'The Unify Circuit webhook. e.g. https://circuit.com/rest/v2/webhooks/incoming/…'
}
].flatten,
'webex-teams' => ::Integrations::WebexTeams.api_fields,
'webex-teams' => ::Integrations::WebexTeams.api_arguments,
'zentao' => [
{
required: true,
@ -643,7 +643,7 @@ module API
desc: 'The product ID of ZenTao project'
}
],
'squash-tm' => ::Integrations::SquashTm.api_fields
'squash-tm' => ::Integrations::SquashTm.api_arguments
}
end

View File

@ -9,7 +9,7 @@ module API
integration_classes = Helpers::IntegrationsHelpers.integration_classes
if Gitlab.dev_or_test_env?
integrations['mock-ci'] = ::Integrations::MockCi.api_fields
integrations['mock-ci'] = ::Integrations::MockCi.api_arguments
integrations['mock-monitoring'] = []
integration_classes += Helpers::IntegrationsHelpers.development_integration_classes
@ -30,8 +30,8 @@ module API
end
SLASH_COMMAND_INTEGRATIONS = {
'mattermost-slash-commands' => ::Integrations::MattermostSlashCommands.api_fields,
'slack-slash-commands' => ::Integrations::SlackSlashCommands.api_fields
'mattermost-slash-commands' => ::Integrations::MattermostSlashCommands.api_arguments,
'slack-slash-commands' => ::Integrations::SlackSlashCommands.api_arguments
}.freeze
helpers do

View File

@ -30,8 +30,7 @@ module Gitlab
project = kwargs[:project]
kwargs[:namespace] ||= project.namespace if project
increase_total_counter(event_name)
increase_weekly_total_counter(event_name)
update_redis_values(event_name, additional_properties)
update_unique_counters(event_name, kwargs)
trigger_snowplow_event(event_name, category, additional_properties, kwargs) if send_snowplow_event
@ -54,6 +53,19 @@ module Gitlab
nil
end
def convert_event_selection_rule_to_path_part(event_selection_rule)
path = event_selection_rule[:name]
if event_selection_rule[:filter].present?
filter = event_selection_rule[:filter]
sorted_filter_keys = filter.keys.sort
serialized_filter = sorted_filter_keys.map { |key| "#{key}:#{filter[key]}" }.join(',')
path = "#{path}-filter:[#{serialized_filter}]"
end
path
end
private
def validate_properties!(additional_properties, kwargs)
@ -87,16 +99,30 @@ module Gitlab
end
end
def increase_total_counter(event_name)
redis_counter_key = Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key(event_name)
def update_redis_values(event_name, additional_properties)
event_definition = Gitlab::Tracking::EventDefinition.find(event_name)
increment(redis_counter_key)
end
return unless event_definition
def increase_weekly_total_counter(event_name)
redis_counter_key = Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key(event_name, Date.today)
event_definition.event_selection_rules.each do |event_selection_rule|
matches_filter = event_selection_rule[:filter].all? do |property_name, value|
additional_properties[property_name] == value
end
increment(redis_counter_key, expiry: 6.weeks)
next unless matches_filter
event_specific_part_of_path = convert_event_selection_rule_to_path_part(event_selection_rule)
if event_selection_rule[:time_framed?]
redis_key = Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key(
event_specific_part_of_path,
Date.today
)
increment(redis_key, expiry: 6.weeks)
else
increment(Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric.redis_key(event_specific_part_of_path))
end
end
end
def update_unique_counters(event_name, kwargs)

View File

@ -12,10 +12,18 @@ module Gitlab
attr_reader :attributes
class << self
include Gitlab::Utils::StrongMemoize
def definitions
@definitions ||= paths.flat_map { |glob_path| load_all_from_path(glob_path) }
end
def find(event_name)
strong_memoize_with(:find, event_name) do
definitions.find { |definition| definition.attributes[:action] == event_name }
end
end
private
def paths
@ -62,6 +70,31 @@ module Gitlab
ERROR_MSG
end
end
def event_selection_rules
@event_selection_rules ||= find_event_selection_rules
end
private
def find_event_selection_rules
result = [
{ name: attributes[:action], time_framed?: false, filter: {} },
{ name: attributes[:action], time_framed?: true, filter: {} }
]
Gitlab::Usage::MetricDefinition.definitions.each_value do |metric_definition|
metric_definition.attributes[:events]&.each do |event_selection_rule|
if event_selection_rule[:name] == attributes[:action]
result << {
name: attributes[:action],
time_framed?: %w[7d 28d].include?(metric_definition.attributes[:time_frame]),
filter: event_selection_rule[:filter] || {}
}
end
end
end
result.uniq
end
end
end
end

View File

@ -26,23 +26,18 @@ module Gitlab
end
def value
return total_value if time_frame == 'all'
period_value
end
private
def total_value
event_names.sum do |event_name|
redis_usage_data do
total_count(self.class.redis_key(event_name))
end
event_specific_part_of_paths = events.map do |event_selection_rule|
Gitlab::InternalEvents.convert_event_selection_rule_to_path_part(event_selection_rule)
end
end
def period_value
keys = self.class.keys_for_aggregation(events: event_names, **time_constraint)
keys = if time_frame == 'all'
event_specific_part_of_paths.map do |event_specific_part_of_path|
self.class.redis_key(event_specific_part_of_path)
end
else
self.class.keys_for_aggregation(events: event_specific_part_of_paths, **time_constraint)
end
keys.sum do |key|
redis_usage_data do
total_count(key)

View File

@ -21562,9 +21562,6 @@ msgstr ""
msgid "Failed to delete custom emoji. Please try again."
msgstr ""
msgid "Failed to delete model"
msgstr ""
msgid "Failed to deploy to"
msgstr ""
@ -33120,9 +33117,6 @@ msgstr ""
msgid "Modal|Close"
msgstr ""
msgid "Model deleted but failed to remove associated packages"
msgstr ""
msgid "Model experiments"
msgstr ""
@ -33132,9 +33126,6 @@ msgstr ""
msgid "Model version not found"
msgstr ""
msgid "Model was successfully deleted"
msgstr ""
msgid "ModelRegistry|Model registry"
msgstr ""
@ -45437,7 +45428,10 @@ msgstr ""
msgid "ScanExecutionPolicy|%{labelStart}File path:%{labelEnd} %{filePath}"
msgstr ""
msgid "ScanExecutionPolicy|%{overrideSelector}into the %{boldStart}.gitlab-ci.yml%{boldEnd} with the following %{boldStart}pipeline execution file%{boldEnd} from %{projectSelector} And run with reference (Optional) %{refSelector}"
msgid "ScanExecutionPolicy|%{overrideSelector}into the %{boldStart}.gitlab-ci.yml%{boldEnd} with the following %{boldStart}pipeline execution file%{boldEnd} from %{projectSelector}"
msgstr ""
msgid "ScanExecutionPolicy|%{overrideSelector}the %{boldStart}.gitlab-ci.yml%{boldEnd} with the following %{boldStart}pipeline execution file%{boldEnd} from %{projectSelector}"
msgstr ""
msgid "ScanExecutionPolicy|%{period} %{days} around %{time} %{timezoneLabel} %{timezone}"
@ -45482,6 +45476,9 @@ msgstr ""
msgid "ScanExecutionPolicy|DAST site profiles"
msgstr ""
msgid "ScanExecutionPolicy|File reference (Optional) %{refSelector}"
msgstr ""
msgid "ScanExecutionPolicy|If there are any conflicting variables with the local pipeline configuration (Ex, gitlab-ci.yml) then variables defined here will take precedence. %{linkStart}Learn more%{linkEnd}."
msgstr ""
@ -45560,7 +45557,10 @@ msgstr ""
msgid "ScanExecutionPolicy|Select timezone"
msgstr ""
msgid "ScanExecutionPolicy|The content of this pipeline execution YAML file is included in the .gitlab-ci.yml file of the target project. All GitLab CI/CD features are supported."
msgid "ScanExecutionPolicy|The content of this pipeline execution YAML file is injected into the .gitlab-ci.yml file of the target project. All GitLab CI/CD features are supported."
msgstr ""
msgid "ScanExecutionPolicy|The content of this pipeline execution YAML file overrides the .gitlab-ci.yml file of the target project. All GitLab CI/CD features are supported."
msgstr ""
msgid "ScanExecutionPolicy|The file at that project, ref, and path doesn't exist"

View File

@ -77,41 +77,41 @@
"@snowplow/browser-plugin-timezone": "^3.9.0",
"@snowplow/browser-tracker": "^3.9.0",
"@sourcegraph/code-host-integration": "0.0.95",
"@tiptap/core": "^2.1.14",
"@tiptap/extension-blockquote": "^2.1.14",
"@tiptap/extension-bold": "^2.1.14",
"@tiptap/extension-bubble-menu": "^2.1.14",
"@tiptap/extension-bullet-list": "^2.1.14",
"@tiptap/extension-code": "^2.1.14",
"@tiptap/extension-code-block": "^2.1.14",
"@tiptap/extension-code-block-lowlight": "^2.1.14",
"@tiptap/extension-document": "^2.1.14",
"@tiptap/extension-dropcursor": "^2.1.14",
"@tiptap/extension-gapcursor": "^2.1.14",
"@tiptap/extension-hard-break": "^2.1.14",
"@tiptap/extension-heading": "^2.1.14",
"@tiptap/extension-highlight": "^2.1.14",
"@tiptap/extension-history": "^2.1.14",
"@tiptap/extension-horizontal-rule": "^2.1.14",
"@tiptap/extension-image": "^2.1.14",
"@tiptap/extension-italic": "^2.1.14",
"@tiptap/extension-link": "^2.1.14",
"@tiptap/extension-list-item": "^2.1.14",
"@tiptap/extension-ordered-list": "^2.1.14",
"@tiptap/extension-paragraph": "^2.1.14",
"@tiptap/extension-strike": "^2.1.14",
"@tiptap/extension-subscript": "^2.1.14",
"@tiptap/extension-superscript": "^2.1.14",
"@tiptap/extension-table": "^2.1.14",
"@tiptap/extension-table-cell": "^2.1.14",
"@tiptap/extension-table-header": "^2.1.14",
"@tiptap/extension-table-row": "^2.1.14",
"@tiptap/extension-task-item": "^2.1.14",
"@tiptap/extension-task-list": "^2.1.14",
"@tiptap/extension-text": "^2.1.14",
"@tiptap/pm": "^2.1.14",
"@tiptap/suggestion": "^2.1.14",
"@tiptap/vue-2": "^2.1.14",
"@tiptap/core": "^2.4.0",
"@tiptap/extension-blockquote": "^2.4.0",
"@tiptap/extension-bold": "^2.4.0",
"@tiptap/extension-bubble-menu": "^2.4.0",
"@tiptap/extension-bullet-list": "^2.4.0",
"@tiptap/extension-code": "^2.4.0",
"@tiptap/extension-code-block": "^2.4.0",
"@tiptap/extension-code-block-lowlight": "^2.4.0",
"@tiptap/extension-document": "^2.4.0",
"@tiptap/extension-dropcursor": "^2.4.0",
"@tiptap/extension-gapcursor": "^2.4.0",
"@tiptap/extension-hard-break": "^2.4.0",
"@tiptap/extension-heading": "^2.4.0",
"@tiptap/extension-highlight": "^2.4.0",
"@tiptap/extension-history": "^2.4.0",
"@tiptap/extension-horizontal-rule": "^2.4.0",
"@tiptap/extension-image": "^2.4.0",
"@tiptap/extension-italic": "^2.4.0",
"@tiptap/extension-link": "^2.4.0",
"@tiptap/extension-list-item": "^2.4.0",
"@tiptap/extension-ordered-list": "^2.4.0",
"@tiptap/extension-paragraph": "^2.4.0",
"@tiptap/extension-strike": "^2.4.0",
"@tiptap/extension-subscript": "^2.4.0",
"@tiptap/extension-superscript": "^2.4.0",
"@tiptap/extension-table": "^2.4.0",
"@tiptap/extension-table-cell": "^2.4.0",
"@tiptap/extension-table-header": "^2.4.0",
"@tiptap/extension-table-row": "^2.4.0",
"@tiptap/extension-task-item": "^2.4.0",
"@tiptap/extension-task-list": "^2.4.0",
"@tiptap/extension-text": "^2.4.0",
"@tiptap/pm": "^2.4.0",
"@tiptap/suggestion": "^2.4.0",
"@tiptap/vue-2": "^2.4.0",
"@vue/apollo-components": "^4.0.0-beta.4",
"@vue/apollo-option": "^4.0.0-beta.4",
"apollo-upload-client": "15.0.0",

View File

@ -364,6 +364,17 @@ tests = [
ee/spec/graphql/types/remote_development/workspace_type_spec.rb
ee/spec/requests/api/graphql/remote_development/workspace/with_id_arg_spec.rb
]
},
{
explanation: 'Run database dictionary related specs on db/docs changes.',
changed_file: 'db/docs/design_management_repositories.yml',
expected: %w[
ee/spec/lib/gitlab/database/desired_sharding_key_spec.rb
spec/db/docs_spec.rb
spec/lib/gitlab/database/dictionary_spec.rb
spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb
spec/lib/gitlab/database/sharding_key_spec.rb
]
}
]

View File

@ -59,6 +59,14 @@ RSpec.describe ProductAnalyticsTracking, :snowplow, feature_category: :product_a
before do
allow(Gitlab::InternalEvents::EventDefinitions).to receive(:known_event?).with('an_event').and_return(true)
event_definition = instance_double(
Gitlab::Tracking::EventDefinition,
event_selection_rules: [
{ name: event_name, time_framed?: false, filter: {} },
{ name: event_name, time_framed?: true, filter: {} }
]
)
allow(Gitlab::Tracking::EventDefinition).to receive(:find).with(event_name).and_return(event_definition)
end
context 'when user is logged in' do

View File

@ -22,21 +22,13 @@ import {
PROJECT_WIKI_ATTACHMENT_DRAWIO_DIAGRAM_HTML,
} from '../../test_constants';
const TIPTAP_AUDIO_HTML = `<p dir="auto">
<span class="media-container audio-container"><audio src="https://gitlab.com/favicon.png" controls="true" data-setup="{}" data-title="gitlab favicon"></audio><a href="https://gitlab.com/favicon.png" class="with-attachment-icon">gitlab favicon</a></span>
</p>`;
const TIPTAP_AUDIO_HTML = `<p dir="auto"><span class="media-container audio-container"><audio src="https://gitlab.com/favicon.png" controls="true" data-setup="{}" data-title="gitlab favicon"></audio><a href="https://gitlab.com/favicon.png" class="with-attachment-icon">gitlab favicon</a></span></p>`;
const TIPTAP_DIAGRAM_HTML = `<p dir="auto">
<img src="https://gitlab.com/favicon.png" alt="gitlab favicon">
</p>`;
const TIPTAP_DIAGRAM_HTML = `<p dir="auto"><img src="https://gitlab.com/favicon.png" alt="gitlab favicon"></p>`;
const TIPTAP_IMAGE_HTML = `<p dir="auto">
<img src="https://gitlab.com/favicon.png" alt="gitlab favicon">
</p>`;
const TIPTAP_IMAGE_HTML = `<p dir="auto"><img src="https://gitlab.com/favicon.png" alt="gitlab favicon"></p>`;
const TIPTAP_VIDEO_HTML = `<p dir="auto">
<span class="media-container video-container"><video src="https://gitlab.com/favicon.png" controls="true" data-setup="{}" data-title="gitlab favicon"></video><a href="https://gitlab.com/favicon.png" class="with-attachment-icon">gitlab favicon</a></span>
</p>`;
const TIPTAP_VIDEO_HTML = `<p dir="auto"><span class="media-container video-container"><video src="https://gitlab.com/favicon.png" controls="true" data-setup="{}" data-title="gitlab favicon"></video><a href="https://gitlab.com/favicon.png" class="with-attachment-icon">gitlab favicon</a></span></p>`;
const createFakeEvent = () => ({ preventDefault: jest.fn(), stopPropagation: jest.fn() });
@ -110,7 +102,7 @@ describe.each`
tiptapEditor
.chain()
.insertContent(mediaHTML)
.setNodeSelection(4) // select the media
.setNodeSelection(1) // select the media
.run();
contentEditor.resolveUrl.mockResolvedValue(`/group1/project1/-/wikis/${filePath}`);

View File

@ -49,7 +49,7 @@ describe('content_editor/extensions/diagram', () => {
doc(
diagram(
{ language: 'mermaid' },
'pie title NETFLIX\n "Time spent looking for movie" : 90\n "Time spent watching it" : 10',
'pie title NETFLIX "Time spent looking for movie" : 90 "Time spent watching it" : 10',
),
).toJSON(),
);

View File

@ -10,6 +10,7 @@ import EmojiPicker from '~/emoji/components/picker.vue';
import DesignNoteAwardsList from '~/design_management/components/design_notes/design_note_awards_list.vue';
import DesignNote from '~/design_management/components/design_notes/design_note.vue';
import DesignReplyForm from '~/design_management/components/design_notes/design_reply_form.vue';
import ImportedBadge from '~/vue_shared/components/imported_badge.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import designNoteAwardEmojiToggleMutation from '~/design_management/graphql/mutations/design_note_award_emoji_toggle.mutation.graphql';
import { mockAwardEmoji } from '../../mock_data/apollo_mock';
@ -25,6 +26,7 @@ const note = {
},
awardEmoji: mockAwardEmoji,
body: 'test',
imported: false,
userPermissions: {
adminNote: false,
awardEmoji: true,
@ -43,6 +45,7 @@ describe('Design note component', () => {
const findUserAvatar = () => wrapper.findComponent(GlAvatar);
const findUserAvatarLink = () => wrapper.findComponent(GlAvatarLink);
const findImportedBadge = () => wrapper.findComponent(ImportedBadge);
const findUserLink = () => wrapper.findByTestId('user-link');
const findDesignNoteAwardsList = () => wrapper.findComponent(DesignNoteAwardsList);
const findReplyForm = () => wrapper.findComponent(DesignReplyForm);
@ -136,6 +139,10 @@ describe('Design note component', () => {
expect(wrapper.findComponent(TimeAgoTooltip).exists()).toBe(true);
});
it('should not render imported badge', () => {
expect(findImportedBadge().exists()).toBe(false);
});
it('should render emoji awards list', () => {
expect(findDesignNoteAwardsList().exists()).toBe(true);
});
@ -174,6 +181,21 @@ describe('Design note component', () => {
});
});
describe('when note is imported', () => {
it('should render imported badge', () => {
createComponent({
props: {
note: {
...note,
imported: true,
},
},
});
expect(findImportedBadge().props('importableType')).toBe('comment');
});
});
describe('when user has a permission to edit note', () => {
it('should open an edit form on edit button click', async () => {
createComponent({

View File

@ -282,6 +282,7 @@ export const mockNoteSubmitSuccessMutationResponse = {
width: 695,
__typename: 'DiffPosition',
},
imported: false,
userPermissions: {
adminNote: true,
repositionNote: true,

View File

@ -17,6 +17,7 @@ export default {
author: mockAuthor,
awardEmoji: mockAwardEmoji,
createdAt: '2020-05-08T07:10:45Z',
imported: false,
userPermissions: {
repositionNote: true,
awardEmoji: true,
@ -39,6 +40,7 @@ export default {
},
awardEmoji: mockAwardEmoji,
createdAt: '2020-05-08T07:10:45Z',
imported: false,
userPermissions: {
adminNote: true,
awardEmoji: true,

View File

@ -20,6 +20,7 @@ const DISCUSSION_2 = {
},
awardEmoji: mockAwardEmoji,
createdAt: '2020-05-08T07:10:45Z',
imported: false,
userPermissions: {
adminNote: true,
awardEmoji: true,
@ -49,6 +50,7 @@ export const DISCUSSION_3 = {
},
awardEmoji: mockAwardEmoji,
createdAt: '2020-05-09T07:10:45Z',
imported: false,
userPermissions: {
adminNote: true,
awardEmoji: true,

View File

@ -11,6 +11,7 @@ RSpec.describe GitlabSchema.types['Note'], feature_category: :team_planning do
body
body_html
award_emoji
imported
internal
created_at
discussion

View File

@ -2,54 +2,151 @@
require 'spec_helper'
RSpec.describe ::API::Entities::Project do
let(:project) { create(:project, :public) }
let(:current_user) { create(:user) }
let(:options) { { current_user: current_user } }
RSpec.describe ::API::Entities::Project, feature_category: :groups_and_projects do
let_it_be(:project) { create(:project, :private) }
let_it_be(:current_user) { create(:user) }
let(:entity) do
described_class.new(project, options)
end
let(:options) { { current_user: current_user, statistics: true } }
let(:entity) { described_class.new(project, options) }
subject(:json) { entity.as_json }
context 'without project feature' do
before do
project.project_feature.destroy!
project.reload
before do
allow(Gitlab.config.registry).to receive(:enabled).and_return(true)
end
context 'as a guest' do
before_all do
project.add_guest(current_user)
end
it 'returns a response' do
expect(json[:issues_access_level]).to be_nil
expect(json[:repository_access_level]).to be_nil
expect(json[:merge_requests_access_level]).to be_nil
it 'exposes the correct attributes' do
expect(json.keys).to contain_exactly(
:id, :description, :name, :name_with_namespace, :path, :path_with_namespace,
:created_at, :tag_list, :topics, :ssh_url_to_repo, :http_url_to_repo, :web_url,
:avatar_url, :star_count, :last_activity_at, :namespace, :container_registry_image_prefix,
:_links, :empty_repo, :archived, :visibility, :owner, :open_issues_count,
:description_html, :updated_at, :can_create_merge_request_in, :shared_with_groups
)
end
end
describe '.service_desk_address', feature_category: :service_desk do
before do
allow(project).to receive(:service_desk_enabled?).and_return(true)
context 'as a reporter' do
before_all do
project.add_reporter(current_user)
end
context 'when a user can admin issues' do
it 'exposes the correct attributes' do
expect(json.keys).to contain_exactly(
:id, :description, :name, :name_with_namespace, :path, :path_with_namespace,
:created_at, :default_branch, :tag_list, :topics, :ssh_url_to_repo, :http_url_to_repo,
:web_url, :readme_url, :forks_count, :avatar_url, :star_count, :last_activity_at,
:namespace, :container_registry_image_prefix, :_links, :empty_repo, :archived,
:visibility, :owner, :open_issues_count, :description_html, :updated_at,
:can_create_merge_request_in, :statistics, :ci_config_path, :shared_with_groups, :service_desk_address
)
end
end
context 'as a developer' do
before_all do
project.add_developer(current_user)
end
it 'exposes the correct attributes' do
expect(json.keys).to contain_exactly(
:id, :description, :name, :name_with_namespace, :path, :path_with_namespace,
:created_at, :default_branch, :tag_list, :topics, :ssh_url_to_repo, :http_url_to_repo,
:web_url, :readme_url, :forks_count, :avatar_url, :star_count, :last_activity_at,
:namespace, :container_registry_image_prefix, :_links, :empty_repo, :archived,
:visibility, :owner, :open_issues_count, :description_html, :updated_at,
:can_create_merge_request_in, :statistics, :ci_config_path, :shared_with_groups, :service_desk_address
)
end
end
context 'as a maintainer' do
before_all do
project.add_maintainer(current_user)
end
it 'exposes the correct attributes' do
expected_fields = [
:id, :description, :name, :name_with_namespace, :path, :path_with_namespace,
:created_at, :default_branch, :tag_list, :topics, :ssh_url_to_repo, :http_url_to_repo,
:web_url, :readme_url, :forks_count, :avatar_url, :star_count, :last_activity_at,
:namespace, :container_registry_image_prefix, :_links, :empty_repo, :archived,
:visibility, :owner, :open_issues_count, :description_html, :updated_at,
:can_create_merge_request_in, :statistics, :ci_config_path, :shared_with_groups, :service_desk_address,
:emails_disabled, :emails_enabled, :resolve_outdated_diff_discussions,
:container_expiration_policy, :repository_object_format, :shared_runners_enabled,
:lfs_enabled, :creator_id, :import_url, :import_type, :import_status,
:import_error, :ci_default_git_depth, :ci_forward_deployment_enabled,
:ci_forward_deployment_rollback_allowed, :ci_job_token_scope_enabled,
:ci_separated_caches, :ci_allow_fork_pipelines_to_run_in_parent_project, :build_git_strategy,
:keep_latest_artifact, :restrict_user_defined_variables, :runners_token,
:runner_token_expiration_interval, :group_runners_enabled, :auto_cancel_pending_pipelines,
:build_timeout, :auto_devops_enabled, :auto_devops_deploy_strategy, :public_jobs,
:only_allow_merge_if_pipeline_succeeds, :allow_merge_on_skipped_pipeline,
:request_access_enabled, :only_allow_merge_if_all_discussions_are_resolved,
:remove_source_branch_after_merge, :printing_merge_request_link_enabled,
:merge_method, :squash_option, :enforce_auth_checks_on_uploads,
:suggestion_commit_message, :merge_commit_template, :squash_commit_template,
:issue_branch_template, :warn_about_potentially_unwanted_characters,
:autoclose_referenced_issues, :packages_enabled, :service_desk_enabled, :issues_enabled,
:merge_requests_enabled, :wiki_enabled, :jobs_enabled, :snippets_enabled,
:container_registry_enabled, :issues_access_level, :repository_access_level,
:merge_requests_access_level, :forking_access_level, :wiki_access_level,
:builds_access_level, :snippets_access_level, :pages_access_level, :analytics_access_level,
:container_registry_access_level, :security_and_compliance_access_level,
:releases_access_level, :environments_access_level, :feature_flags_access_level,
:infrastructure_access_level, :monitor_access_level, :model_experiments_access_level,
:model_registry_access_level
]
if Gitlab.ee?
expected_fields += [
:requirements_enabled, :security_and_compliance_enabled,
:requirements_access_level, :compliance_frameworks
]
end
expect(json.keys).to match(expected_fields)
end
context 'without project feature' do
before do
project.add_reporter(current_user)
project.project_feature.destroy!
project.reload
end
it 'is present' do
expect(json[:service_desk_address]).to be_present
end
end
context 'when a user can not admin project' do
it 'is empty' do
expect(json[:service_desk_address]).to be_nil
it 'returns nil for all features' do
expect(json[:issues_access_level]).to be_nil
expect(json[:repository_access_level]).to be_nil
expect(json[:merge_requests_access_level]).to be_nil
expect(json[:forking_access_level]).to be_nil
expect(json[:wiki_access_level]).to be_nil
expect(json[:builds_access_level]).to be_nil
expect(json[:snippets_access_level]).to be_nil
expect(json[:pages_access_level]).to be_nil
expect(json[:analytics_access_level]).to be_nil
expect(json[:container_registry_access_level]).to be_nil
expect(json[:security_and_compliance_access_level]).to be_nil
expect(json[:releases_access_level]).to be_nil
expect(json[:environments_access_level]).to be_nil
expect(json[:feature_flags_access_level]).to be_nil
expect(json[:infrastructure_access_level]).to be_nil
expect(json[:monitor_access_level]).to be_nil
expect(json[:model_experiments_access_level]).to be_nil
expect(json[:model_registry_access_level]).to be_nil
end
end
end
describe '.shared_with_groups' do
let(:group) { create(:group, :private) }
describe 'shared_with_groups' do
let_it_be(:group) { create(:group, :private) }
subject(:shared_with_groups) { json[:shared_with_groups].as_json }
before do
project.project_group_links.create!(group: group)
@ -57,39 +154,17 @@ RSpec.describe ::API::Entities::Project do
context 'when the current user does not have access to the group' do
it 'is empty' do
expect(json[:shared_with_groups]).to be_empty
expect(shared_with_groups).to be_empty
end
end
context 'when the current user has access to the group' do
before do
before_all do
group.add_guest(current_user)
end
it 'contains information about the shared group' do
expect(json[:shared_with_groups]).to contain_exactly(include(group_id: group.id))
end
end
end
describe '.ci/cd settings' do
context 'when the user is not an admin' do
before do
project.add_reporter(current_user)
end
it 'does not return ci settings' do
expect(json[:ci_default_git_depth]).to be_nil
end
end
context 'when the user has admin privileges' do
before do
project.add_maintainer(current_user)
end
it 'returns ci settings' do
expect(json[:ci_default_git_depth]).to be_present
expect(shared_with_groups[0]['group_id']).to eq(group.id)
end
end
end

View File

@ -16,6 +16,8 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_snowplow)
allow(Gitlab::InternalEvents::EventDefinitions).to receive(:unique_properties).and_return(unique_properties)
allow(Gitlab::Tracking::EventDefinition).to receive(:find).and_return(event_definition)
allow(event_definition).to receive(:event_selection_rules).and_return(event_selection_rules)
allow(fake_snowplow).to receive(:event)
end
@ -41,10 +43,8 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
end
def expect_redis_tracking
call_index = 0
expect(redis).to have_received(:incr).twice do |redis_key|
expect(redis_key).to end_with(redis_arguments[call_index])
call_index += 1
redis_arguments.each do |redis_argument|
expect(redis).to have_received(:incr).with(a_string_ending_with(redis_argument))
end
end
@ -98,13 +98,22 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
let_it_be(:additional_properties) { {} }
let(:redis) { instance_double('Redis') }
let(:event_definition) { instance_double(Gitlab::Tracking::EventDefinition) }
let(:event_selection_rules) do
[
{ name: event_name, time_framed?: false, filter: {} },
{ name: event_name, time_framed?: true, filter: {} }
]
end
let(:fake_snowplow) { instance_double(Gitlab::Tracking::Destinations::Snowplow) }
let(:event_name) { 'g_edit_by_web_ide' }
let(:category) { 'InternalEventTracking' }
let(:unique_properties) { [:user] }
let(:unique_value) { user.id }
let(:property_name) { :user }
let(:redis_arguments) { [event_name, Date.today.strftime('%G-%V')] }
let(:week_suffix) { Date.today.strftime('%G-%V') }
let(:redis_arguments) { [event_name, week_suffix] }
context 'when only user is passed' do
let(:project) { nil }
@ -174,6 +183,79 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
expect_snowplow_tracking(nil, additional_properties)
end
context 'when a filter is defined' do
let(:time_framed) { true }
let(:event_selection_rules) do
[
{ name: event_name, time_framed?: time_framed, filter: {} },
{ name: event_name, time_framed?: time_framed, filter: { label: 'label_name' } },
{ name: event_name, time_framed?: time_framed, filter: { label: 'another_label_value' } },
{ name: event_name, time_framed?: time_framed, filter: { label: 'label_name', value: 16.17 } }
]
end
context 'when event selection rule is time framed' do
let(:redis_arguments) do
[
"filter:[label:label_name]-#{week_suffix}",
"filter:[label:label_name,value:16.17]-#{week_suffix}",
"#{event_name}-#{week_suffix}"
]
end
it 'updates the correct redis keys' do
described_class.track_event(
event_name,
additional_properties: additional_properties,
user: user,
project: project
)
expect_redis_tracking
end
end
context 'when event selection rule is not time framed' do
let(:time_framed) { false }
let(:redis_arguments) do
[
"filter:[label:label_name]",
"filter:[label:label_name,value:16.17]",
event_name.to_s
]
end
context 'when a matching event is tracked' do
it 'updates the matching redis keys' do
described_class.track_event(
event_name,
additional_properties: additional_properties,
user: user,
project: project
)
expect_redis_tracking
end
end
context 'when a non-matching event is tracked' do
let(:additional_properties) { { label: 'unrelated_string' } }
let(:redis_arguments) { [event_name.to_s] }
it 'updates only the matching redis keys' do
described_class.track_event(
event_name,
additional_properties: additional_properties,
user: user,
project: project
)
expect_redis_tracking
end
end
end
end
end
context 'when feature_enabled_by_namespace_ids is passed' do
@ -312,16 +394,7 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
end
context 'when unique key is defined' do
let(:event_name) { 'p_ci_templates_terraform_base_latest' }
let(:unique_value) { project.id }
let(:property_names) { [:project] }
let(:property_name) { :project }
before do
allow(Gitlab::InternalEvents::EventDefinitions).to receive(:unique_properties)
.with(event_name)
.and_return(property_names)
end
let(:event_name) { 'i_code_review_saved_replies_use_in_other' }
it 'is used when logging to RedisHLL', :aggregate_failures do
described_class.track_event(event_name, user: user, project: project)
@ -332,9 +405,9 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
end
context 'when property is missing' do
let(:unique_value) { project.id }
let(:property_names) { [:project] }
let(:property_name) { :project }
let(:unique_value) { user.id }
let(:property_names) { [:user] }
let(:property_name) { :user }
it 'logs error' do
expect { described_class.track_event(event_name, merge_request_id: 1) }.not_to raise_error
@ -345,7 +418,7 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
end
context 'when there are multiple unique keys' do
let(:property_names) { [:project, :user] }
let(:unique_properties) { [:project, :user] }
it 'all of them are used when logging to RedisHLL', :aggregate_failures do
described_class.track_event(event_name, user: user, project: project)
@ -386,6 +459,34 @@ RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_ana
end
end
describe '#convert_event_selection_rule_to_path_part' do
context 'without a filter' do
let(:event_selection_rule) { { name: 'example_event' } }
it 'returns the event name' do
expect(described_class.convert_event_selection_rule_to_path_part(event_selection_rule)).to eq('example_event')
end
end
context 'with a single property filter' do
let(:event_selection_rule) { { name: 'example_event', filter: { label: 'npm' } } }
it 'returns the correct path with filter' do
expect(described_class.convert_event_selection_rule_to_path_part(event_selection_rule))
.to eq('example_event-filter:[label:npm]')
end
end
context 'with a multi property filter that are unordered' do
let(:event_selection_rule) { { name: 'example_event', filter: { property: 'deploy_token', label: 'npm' } } }
it 'returns the correct path with filter' do
expect(described_class.convert_event_selection_rule_to_path_part(event_selection_rule))
.to eq('example_event-filter:[label:npm,property:deploy_token]')
end
end
end
describe 'Product Analytics tracking' do
let(:app_id) { 'foobar' }
let(:url) { 'http://localhost:4000' }

View File

@ -149,4 +149,28 @@ RSpec.describe Gitlab::Tracking::EventDefinition, feature_category: :service_pin
end
end
end
describe '.find' do
let(:event_definition1) { described_class.new(nil, { action: 'event1' }) }
let(:event_definition2) { described_class.new(nil, { action: 'event2' }) }
before do
described_class.clear_memoization(:find)
allow(described_class).to receive(:definitions).and_return([event_definition1, event_definition2])
end
it 'finds the event definition by action' do
expect(described_class.find('event1')).to eq(event_definition1)
end
it 'memorizes results' do
expect(described_class).to receive(:definitions).exactly(3).times.and_call_original
10.times do
described_class.find('event1')
described_class.find('event2')
described_class.find('non-existing-event')
end
end
end
end

View File

@ -6,6 +6,24 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::TotalCountMetric, :clea
feature_category: :product_analytics_data_management do
before do
allow(Gitlab::InternalEvents::EventDefinitions).to receive(:known_event?).and_return(true)
event_definition = instance_double(
Gitlab::Tracking::EventDefinition,
event_selection_rules: [{ name: 'my_event', time_framed?: false, filter: {} }]
)
allow(Gitlab::Tracking::EventDefinition).to receive(:find).with('my_event').and_return(event_definition)
event_definition1 = instance_double(
Gitlab::Tracking::EventDefinition,
event_selection_rules: [{ name: 'my_event1', time_framed?: false, filter: {} }]
)
allow(Gitlab::Tracking::EventDefinition).to receive(:find).with('my_event1').and_return(event_definition1)
event_definition2 = instance_double(
Gitlab::Tracking::EventDefinition,
event_selection_rules: [{ name: 'my_event2', time_framed?: false, filter: {} }]
)
allow(Gitlab::Tracking::EventDefinition).to receive(:find).with('my_event2').and_return(event_definition2)
end
context 'with multiple similar events' do

View File

@ -479,6 +479,32 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
end
end
describe 'scopes for preloading' do
let_it_be(:runner) { create(:ci_runner) }
let_it_be(:user) { create(:user).tap { |user| create(:user_detail, user: user) } }
before_all do
build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
build.runner = runner
build.user = user
build.save!
end
describe '.eager_load_for_api' do
subject(:eager_load_for_api) { described_class.eager_load_for_api }
it { expect(eager_load_for_api.last.association(:user)).to be_loaded }
it { expect(eager_load_for_api.last.user.association(:user_detail)).to be_loaded }
it { expect(eager_load_for_api.last.association(:metadata)).to be_loaded }
it { expect(eager_load_for_api.last.association(:job_artifacts_archive)).to be_loaded }
it { expect(eager_load_for_api.last.association(:job_artifacts)).to be_loaded }
it { expect(eager_load_for_api.last.association(:runner)).to be_loaded }
it { expect(eager_load_for_api.last.association(:tags)).to be_loaded }
it { expect(eager_load_for_api.last.association(:pipeline)).to be_loaded }
it { expect(eager_load_for_api.last.pipeline.association(:project)).to be_loaded }
end
end
describe '#stick_build_if_status_changed' do
it 'sticks the build if the status changed' do
job = create(:ci_build, :pending, pipeline: pipeline)

View File

@ -36,9 +36,9 @@ RSpec.describe Integrations::BeyondIdentity, feature_category: :integrations do
end
end
describe '.api_fields' do
it 'returns api fields' do
expect(described_class.api_fields).to eq([{
describe '.api_arguments' do
it 'returns api arguments' do
expect(described_class.api_arguments).to eq([{
required: true,
name: :token,
type: String,

View File

@ -426,16 +426,16 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
expect(json_job['pipeline']['status']).to eq job.pipeline.status
end
it 'avoids N+1 queries', :skip_before_request do
it 'avoids N+1 queries', :skip_before_request, :use_sql_query_cache do
first_build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
first_build.runner = create(:ci_runner)
first_build.user = create(:user)
first_build.save!
control = ActiveRecord::QueryRecorder.new { go }
control = ActiveRecord::QueryRecorder.new(skip_cached: false) { go }
5.times do
second_pipeline = create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
second_pipeline = create(:ci_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
second_build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: second_pipeline)
second_build.runner = create(:ci_runner)
second_build.user = create(:user)

View File

@ -0,0 +1,110 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Deleting a model', feature_category: :mlops do
using RSpec::Parameterized::TableSyntax
include GraphqlHelpers
let_it_be_with_reload(:model) { create(:ml_models) }
let_it_be(:user) { create(:user) }
let(:project) { model.project }
let(:id) { model.to_global_id.to_s }
let(:query) do
<<~GQL
model {
id
}
errors
GQL
end
let(:params) { { project_path: project.full_path, id: id } }
let(:mutation) { graphql_mutation(:ml_model_delete, params, query) }
let(:mutation_response) { graphql_mutation_response(:ml_model_delete) }
shared_examples 'destroying the model' do
it 'destroys model' do
expect(::Ml::DestroyModelService).to receive(:new).with(model, user).and_call_original
expect { mutation_request }.to change { ::Ml::Model.count }.by(-1)
expect(mutation_response['model']).to eq({ "id" => id })
end
it_behaves_like 'returning response status', :success
end
shared_examples 'denying the mutation request' do
it 'does not delete the model' do
expect(::Ml::DestroyModelService).not_to receive(:new)
expect { mutation_request }.to not_change { ::Ml::Model.count }
expect(mutation_response).to be_nil
expect_graphql_errors_to_include("The resource that you are attempting to access does not exist or you don't " \
"have permission to perform this action")
end
it_behaves_like 'returning response status', :success
end
shared_examples 'model was not found' do
it 'does not delete the model' do
expect(::Ml::DestroyModelService).not_to receive(:new)
expect { mutation_request }.to not_change { ::Ml::Model.count }
expect(mutation_response["errors"]).to match_array(['Model not found'])
end
it_behaves_like 'returning response status', :success
end
describe 'post graphql mutation' do
subject(:mutation_request) { post_graphql_mutation(mutation, current_user: user) }
context 'with valid id' do
where(:user_role, :mutation_behavior) do
:reporter | 'destroying the model'
:guest | 'denying the mutation request'
end
with_them do
before do
project.public_send("add_#{user_role}", user)
end
it_behaves_like params[:mutation_behavior]
end
end
context 'with authorized user' do
before do
project.add_maintainer(user)
end
context 'with invalid id' do
let(:params) { { project_path: project.full_path, id: "gid://gitlab/Ml::Model/#{non_existing_record_id}" } }
it_behaves_like 'model was not found'
end
context 'when an error occurs' do
it 'returns the errors in the response' do
allow_next_found_instance_of(::Ml::Model) do |model|
allow(model).to receive(:destroy).and_return(nil)
errors = ActiveModel::Errors.new(model).tap { |e| e.add(:id, 'some error') }
allow(model).to receive(:errors).and_return(errors)
end
mutation_request
expect(mutation_response['errors']).to match_array(['Id some error'])
end
end
end
end
end

View File

@ -106,11 +106,13 @@ RSpec.describe 'Destroying a model', feature_category: :mlops do
it 'returns the errors in the response' do
allow_next_found_instance_of(::Ml::Model) do |model|
allow(model).to receive(:destroy).and_return(nil)
errors = ActiveModel::Errors.new(model).tap { |e| e.add(:id, 'some error') }
allow(model).to receive(:errors).and_return(errors)
end
mutation_request
expect(mutation_response['errors']).to match_array(['Failed to delete model'])
expect(mutation_response['errors']).to match_array(['Id some error'])
end
end
end

View File

@ -1735,14 +1735,6 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and
expect(json_response.map { |project| project['id'] }).to contain_exactly(public_project.id)
end
it 'includes container_registry_access_level' do
get api("/users/#{user4.id}/projects/", user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
expect(json_response.first.keys).to include('container_registry_access_level')
end
context 'filter by updated_at' do
it 'returns only projects updated on the given timeframe' do
get api("/users/#{user.id}/projects", user),

View File

@ -36,14 +36,14 @@ RSpec.describe ::Ml::DestroyModelService, feature_category: :mlops do
before do
allow_next_instance_of(Packages::MarkPackagesForDestructionService) do |instance|
allow(instance).to receive(:execute).and_return ServiceResponse.error(message: "")
allow(instance).to receive(:execute).and_return ServiceResponse.error(message: "An error")
end
end
it 'returns success with warning', :aggregate_failures do
expect { service_result }.to change { Ml::Model.count }.by(-1).and change { Ml::ModelVersion.count }.by(-1)
expect(service_result).to be_success
expect(service_result.message).to eq('Model deleted but failed to remove associated packages')
expect { service_result }.not_to change { Ml::Model.count }
expect(service_result).to be_error
expect(service_result.message).to eq("An error")
end
end
end

View File

@ -146,6 +146,17 @@ mapping:
- 'spec/db/docs_spec.rb'
- 'ee/spec/lib/ee/gitlab/database/docs/docs_spec.rb'
# Run database dictionary related specs on db/docs changes.
# https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/6276
- source: 'db/docs/.+\.yml'
test:
# Found via `git grep Database::Dictionary -l spec ee/spec`
- 'ee/spec/lib/gitlab/database/desired_sharding_key_spec.rb'
- 'spec/db/docs_spec.rb'
- 'spec/lib/gitlab/database/dictionary_spec.rb'
- 'spec/lib/gitlab/database/no_new_tables_with_gitlab_main_schema_spec.rb'
- 'spec/lib/gitlab/database/sharding_key_spec.rb'
# See https://gitlab.com/gitlab-org/quality/engineering-productivity/master-broken-incidents/-/issues/1360
- source: 'vendor/project_templates/.*'
test: 'spec/lib/gitlab/project_template_spec.rb'

574
yarn.lock
View File

@ -1826,11 +1826,6 @@
resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.3.tgz#0300943770e04231041a51bd39f0439b5c7ab4f0"
integrity sha512-nkalE/f1RvRGChwBnEIoBfSEYOXnCRdleKuv6+lePbMDrMZXeDQnqak5XDOeBgrPPyPfAdcCu/B5z+v3VhplGg==
"@linaria/core@3.0.0-beta.13":
version "3.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@linaria/core/-/core-3.0.0-beta.13.tgz#049c5be5faa67e341e413a0f6b641d5d78d91056"
integrity sha512-3zEi5plBCOsEzUneRVuQb+2SAx3qaC1dj0FfFAI6zIJQoDWu0dlSwKijMRack7oO9tUWrchfj3OkKQAd1LBdVg==
"@mattiasbuelens/web-streams-adapter@^0.1.0":
version "0.1.0"
resolved "https://registry.yarnpkg.com/@mattiasbuelens/web-streams-adapter/-/web-streams-adapter-0.1.0.tgz#607b5a25682f4ae2741da7ba6df39302505336b3"
@ -1915,40 +1910,10 @@
resolved "https://registry.yarnpkg.com/@rails/ujs/-/ujs-7.0.8-1.tgz#4156942025aa6b016d3d9a7df3e542ac962359e2"
integrity sha512-uZRCeEl6zY/9JvjTpQilbJyBqFr4onNJTkQTJgfsXDZPJyqCAvyGGraO0LaiHvWKNqD1fTRamdSYg/l+U7daVA==
"@remirror/core-constants@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@remirror/core-constants/-/core-constants-2.0.0.tgz#a52f89059d93955e00810023cc76b4f7db9650bf"
integrity sha512-vpePPMecHJllBqCWXl6+FIcZqS+tRUM2kSCCKFeEo1H3XUEv3ocijBIPhnlSAa7g6maX+12ATTgxrOsLpWVr2g==
dependencies:
"@babel/runtime" "^7.13.10"
"@remirror/core-helpers@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@remirror/core-helpers/-/core-helpers-2.0.1.tgz#6847666a009ada8c9b9f3a093c13a6d07a95d9bb"
integrity sha512-s8M1pn33aBUhduvD1QR02uUQMegnFkGaTr4c1iBzxTTyg0rbQstzuQ7Q8TkL6n64JtgCdJS9jLz2dONb2meBKQ==
dependencies:
"@babel/runtime" "^7.13.10"
"@linaria/core" "3.0.0-beta.13"
"@remirror/core-constants" "^2.0.0"
"@remirror/types" "^1.0.0"
"@types/object.omit" "^3.0.0"
"@types/object.pick" "^1.3.1"
"@types/throttle-debounce" "^2.1.0"
case-anything "^2.1.10"
dash-get "^1.0.2"
deepmerge "^4.2.2"
fast-deep-equal "^3.1.3"
make-error "^1.3.6"
object.omit "^3.0.0"
object.pick "^1.3.0"
throttle-debounce "^3.0.1"
"@remirror/types@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@remirror/types/-/types-1.0.0.tgz#cc8764440089a2ada71f149c409739575b73b12e"
integrity sha512-7HQbW7k8VxrAtfzs9FxwO6XSDabn8tSFDi1wwzShOnU+cvaYpfxu0ygyTk3TpXsag1hgFKY3ZIlAfB4WVz2LkQ==
dependencies:
type-fest "^2.0.0"
"@remirror/core-constants@^2.0.2":
version "2.0.2"
resolved "https://registry.yarnpkg.com/@remirror/core-constants/-/core-constants-2.0.2.tgz#f05eccdc69e3a65e7d524b52548f567904a11a1a"
integrity sha512-dyHY+sMF0ihPus3O27ODd4+agdHMEmuRdyiZJ2CCWjPV5UFmn17ZbElvk6WOGVE4rdCJKZQCrPV2BcikOMLUGQ==
"@rollup/plugin-graphql@^2.0.4":
version "2.0.4"
@ -2673,213 +2638,213 @@
dom-accessibility-api "^0.5.1"
pretty-format "^26.4.2"
"@tiptap/core@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/core/-/core-2.1.14.tgz#c30d2227891554c0ad1038d84b0f0c44deebf77b"
integrity sha512-X8FWXWhxrOklNEdhDkSa4PekF3BwGjDfhq7Es95OrdJ3vZ1a5lkbCdx4jXErsX1C4TaIs7cI3tqdflTXhqjLmg==
"@tiptap/core@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/core/-/core-2.4.0.tgz#6f8eee8beb5b89363582366b201ccc4798ac98a9"
integrity sha512-YJSahk8pkxpCs8SflCZfTnJpE7IPyUWIylfgXM2DefjRQa5DZ+c6sNY0s/zbxKYFQ6AuHVX40r9pCfcqHChGxQ==
"@tiptap/extension-blockquote@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-blockquote/-/extension-blockquote-2.1.14.tgz#3c5decf6316ea1d299330f33e77d36047aa8ec54"
integrity sha512-hslTfGzlC52lq3EGaxl1V8tGFsnjGLIlYr5SGJzPYwQcr2WHU/WJZli66HB+8N2o+ox5Cp4gQRNDUd9XsfxChg==
"@tiptap/extension-blockquote@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-blockquote/-/extension-blockquote-2.4.0.tgz#0179076ea2fa12e41a198dad087b81d368653b8d"
integrity sha512-nJJy4KsPgQqWTTDOWzFRdjCfG5+QExfZj44dulgDFNh+E66xhamnbM70PklllXJgEcge7xmT5oKM0gKls5XgFw==
"@tiptap/extension-bold@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-bold/-/extension-bold-2.1.14.tgz#9a0d0274bd0569d5ed37720554264f2a7d3d8bac"
integrity sha512-LeIRHjc6LsZ4JVuvbrb2U18IHvaYwP4+O6lIG2riTmvuqhc1UL2dKeG8X13xfk7OttA89Vkkb/XdjzQvcT1I0Q==
"@tiptap/extension-bold@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-bold/-/extension-bold-2.4.0.tgz#b5ced2c3bf51f304890137dbdf394d58c01eb208"
integrity sha512-csnW6hMDEHoRfxcPRLSqeJn+j35Lgtt1YRiOwn7DlS66sAECGRuoGfCvQSPij0TCDp4VCR9if5Sf8EymhnQumQ==
"@tiptap/extension-bubble-menu@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-bubble-menu/-/extension-bubble-menu-2.1.14.tgz#6a23066934969fc89d2cd8b3b3932e1364712ee9"
integrity sha512-9+KsP2rCVymlSKXx7BhPF9xy7dj2/G7auu7qZ4AJzEbsLj1PMS8/pSjPUabCIN6z+9IeifOa2VKmXCnVfcpazw==
"@tiptap/extension-bubble-menu@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-bubble-menu/-/extension-bubble-menu-2.4.0.tgz#a079329318fc21407f9a3c9c3da6ef72cb0b4ab6"
integrity sha512-s99HmttUtpW3rScWq8rqk4+CGCwergNZbHLTkF6Rp6TSboMwfp+rwL5Q/JkcAG9KGLso1vGyXKbt1xHOvm8zMw==
dependencies:
tippy.js "^6.3.7"
"@tiptap/extension-bullet-list@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-bullet-list/-/extension-bullet-list-2.1.14.tgz#0e7f07c0f8f51de8378fdea49e7c9626ebb1ffcd"
integrity sha512-dbnYDGNkbtFaCQIqNsOD9cc2JewN4Ref3Qq0NrVoh+MbbX2oJN2vA8rrKmEv1GhxDjtvaj2RiH1ki5XW3P98UQ==
"@tiptap/extension-bullet-list@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-bullet-list/-/extension-bullet-list-2.4.0.tgz#60eea05b5ac8c8e8d615c057559fddb95033abeb"
integrity sha512-9S5DLIvFRBoExvmZ+/ErpTvs4Wf1yOEs8WXlKYUCcZssK7brTFj99XDwpHFA29HKDwma5q9UHhr2OB2o0JYAdw==
"@tiptap/extension-code-block-lowlight@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-code-block-lowlight/-/extension-code-block-lowlight-2.1.14.tgz#f7b9bd0b6a2d6bbbfd2a10d2192831584e132d41"
integrity sha512-FSO8LRt2Ja8d/WQ340z1gW4AUKBECJHxw08ADV0mJpjY+6NnfOID1lqL02Gy15CjO/amccE1DqPHGlcC39WU+Q==
"@tiptap/extension-code-block-lowlight@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-code-block-lowlight/-/extension-code-block-lowlight-2.4.0.tgz#76cd2ffcb1dcd7ba4b2de4db4c561bd43420af3c"
integrity sha512-j0SdFq66A97Cn7bQOMqFYBaYsmOltZZ6o4uDZH6fdTvEFbfXTdtTYs2awsNSbW+w/DtivKZCvAX1FRLR3/g/5A==
"@tiptap/extension-code-block@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-code-block/-/extension-code-block-2.1.14.tgz#5220bd76d0e957e59898ef145142394dbfd58124"
integrity sha512-D+F+bGrmbXzIkZuKUaM5fhJHVoUmDyTdWCqOMOzG5t53GgMDdLQF7LTzOGC2iAVu0CtAxhUEsoIlzPBdV2FKrA==
"@tiptap/extension-code-block@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-code-block/-/extension-code-block-2.4.0.tgz#b7f1da4825677a2ea6b8e970a1197877551e5dc8"
integrity sha512-QWGdv1D56TBGbbJSj2cIiXGJEKguPiAl9ONzJ/Ql1ZksiQsYwx0YHriXX6TOC//T4VIf6NSClHEtwtxWBQ/Csg==
"@tiptap/extension-code@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-code/-/extension-code-2.1.14.tgz#1e62e5e138981b26675a1e2a7d6f446b99727a82"
integrity sha512-7fuDW0+nyzxTlGEdkkrGMkz5b90xAvZq7EPnta13Px7FsSy771dpbWer7xMbpWGh7VYxOG6qpWJouLLrx2FKyQ==
"@tiptap/extension-code@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-code/-/extension-code-2.4.0.tgz#3a9fed3585bf49f445505c2e9ad71fd66e117304"
integrity sha512-wjhBukuiyJMq4cTcK3RBTzUPV24k5n1eEPlpmzku6ThwwkMdwynnMGMAmSF3fErh3AOyOUPoTTjgMYN2d10SJA==
"@tiptap/extension-document@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-document/-/extension-document-2.1.14.tgz#9b5234a7881f8ef222924a67ef771a2bfff8a75e"
integrity sha512-plOcTQBCysUyz8AXrkBhhAqa+ALyeGJPOku0L3lS6MCSAPM2/KRW/H4KXcrfW0G1lHKiJ4OkP8oHksxa6Id5zg==
"@tiptap/extension-document@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-document/-/extension-document-2.4.0.tgz#a396b2cbcc8708aa2a0a41d0be481fda4b61c77b"
integrity sha512-3jRodQJZDGbXlRPERaloS+IERg/VwzpC1IO6YSJR9jVIsBO6xC29P3cKTQlg1XO7p6ZH/0ksK73VC5BzzTwoHg==
"@tiptap/extension-dropcursor@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-dropcursor/-/extension-dropcursor-2.1.14.tgz#a6f2df2a36a5457a3afff3c933b5328147be8f80"
integrity sha512-ZupJ/3ukcuFK/HhWbD7vuEKt10RC1/Jbk8O+HHcAWftAghsXNAnCsKWhJhAs/MvvoBFQEkmVOdPXvQsDXbbCMw==
"@tiptap/extension-dropcursor@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-dropcursor/-/extension-dropcursor-2.4.0.tgz#8f54908f84a4ab7d2d7de7fc0197511138445740"
integrity sha512-c46HoG2PEEpSZv5rmS5UX/lJ6/kP1iVO0Ax+6JrNfLEIiDULUoi20NqdjolEa38La2VhWvs+o20OviiTOKEE9g==
"@tiptap/extension-floating-menu@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-floating-menu/-/extension-floating-menu-2.1.14.tgz#13f9e6cf62bfc3bec5e05e2bac0f6a1f66a909bf"
integrity sha512-o/yNaZ+ntMBCjFL95JyX6LoVb8fsrx0IsnlNtnGUVr8mpOg2JyeN2ZJpUhPo2aR7QuyfdR1XsGG4TRHJBp3fGg==
"@tiptap/extension-floating-menu@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-floating-menu/-/extension-floating-menu-2.4.0.tgz#75c48b98d0f833251eab70f269ed186f48fc398e"
integrity sha512-vLb9v+htbHhXyty0oaXjT3VC8St4xuGSHWUB9GuAJAQ+NajIO6rBPbLUmm9qM0Eh2zico5mpSD1Qtn5FM6xYzg==
dependencies:
tippy.js "^6.3.7"
"@tiptap/extension-gapcursor@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-gapcursor/-/extension-gapcursor-2.1.14.tgz#fe36282b66f3652c5f7c2d5031b651b2db2a8898"
integrity sha512-wTT8k3msIUBIj3k28ZB8IUdI4zjnkiYGTqzNXud01hLsPuQWkPerW/LqqiyKfsGKSIJa/l8x4ZzUgJv3ciO9YQ==
"@tiptap/extension-gapcursor@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-gapcursor/-/extension-gapcursor-2.4.0.tgz#2a738509d40f5f856492c11e32b10e4462f71216"
integrity sha512-F4y/0J2lseohkFUw9P2OpKhrJ6dHz69ZScABUvcHxjznJLd6+0Zt7014Lw5PA8/m2d/w0fX8LZQ88pZr4quZPQ==
"@tiptap/extension-hard-break@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-hard-break/-/extension-hard-break-2.1.14.tgz#47eea3742a879bf7849377ffdb3549fddffa4474"
integrity sha512-Nv6JS1dmPiiWDJAcdb6nGns7vD65Gqbqxh/RQeT172G2yXu5TD8EJa0OiEhd1sMcEg7OXbHMLtkDzx57mEuZ7Q==
"@tiptap/extension-hard-break@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-hard-break/-/extension-hard-break-2.4.0.tgz#b5bf5b065827280e450fba8f53d137654509d836"
integrity sha512-3+Z6zxevtHza5IsDBZ4lZqvNR3Kvdqwxq/QKCKu9UhJN1DUjsg/l1Jn2NilSQ3NYkBYh2yJjT8CMo9pQIu776g==
"@tiptap/extension-heading@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-heading/-/extension-heading-2.1.14.tgz#868dd4eff41480d31c0cbab04d6d14a682318259"
integrity sha512-x/AMzMANLvgbuwx4qe848WxF5W1Yq4bUjsduSu/5jonpH2sR5AFsH5VbWS8lfT34OdOI0Gs7p+k2NNuykWDPQA==
"@tiptap/extension-heading@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-heading/-/extension-heading-2.4.0.tgz#16302ce691714244c3d3fa92d2db86a5c895a025"
integrity sha512-fYkyP/VMo7YHO76YVrUjd95Qeo0cubWn/Spavmwm1gLTHH/q7xMtbod2Z/F0wd6QHnc7+HGhO7XAjjKWDjldaw==
"@tiptap/extension-highlight@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-highlight/-/extension-highlight-2.1.14.tgz#ce427a031561e8e89436db0dec969cc9ea1c02af"
integrity sha512-TU12/Hw5FBZuk1/j06UqNVx91Hms0XEEgtz3tOwyWrxbOe4hXILNedzrz3aNoTcLJoqOVefw+VBQLcsK0Ztw/Q==
"@tiptap/extension-highlight@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-highlight/-/extension-highlight-2.4.0.tgz#29a200993b0e599223efac373785089109579fd3"
integrity sha512-p2I/CaMrs6hzpj/dSw6UNobOWTV38yTjPK+B4ShJQ7IN2u/C82KOTOeFfJoFd9KykmpVOVW3w3nKG3ad0HXPuQ==
"@tiptap/extension-history@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-history/-/extension-history-2.1.14.tgz#06863c38511d39d309003f8cc187d38fa800dbe5"
integrity sha512-DN9QeiEv/Y3cCOHVH+/0M18btg7Gebhw7ooT0afanyHS/a5aV/IsgDnw6YRHaMfLUgDD7toOSSbjgGYWZX307w==
"@tiptap/extension-history@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-history/-/extension-history-2.4.0.tgz#1dbf8410c091175627414d48a0d857232a8f4094"
integrity sha512-gr5qsKAXEVGr1Lyk1598F7drTaEtAxqZiuuSwTCzZzkiwgEQsWMWTWc9F8FlneCEaqe1aIYg6WKWlmYPaFwr0w==
"@tiptap/extension-horizontal-rule@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-horizontal-rule/-/extension-horizontal-rule-2.1.14.tgz#23c4ffad88b8554e280344d70592bcd6702215fd"
integrity sha512-n5vNE4rTA3zfLhe0p3k38IJGtEWfvr2QIp5lQuw4/i5TcOrnpfryJwA9tLDTgAdcyvTTGJH5jAXWw9ENxBexQg==
"@tiptap/extension-horizontal-rule@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-horizontal-rule/-/extension-horizontal-rule-2.4.0.tgz#7f27c0778004602686251af7e2f7a8461a3d77ba"
integrity sha512-yDgxy+YxagcEsBbdWvbQiXYxsv3noS1VTuGwc9G7ZK9xPmBHJ5y0agOkB7HskwsZvJHoaSqNRsh7oZTkf0VR3g==
"@tiptap/extension-image@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-image/-/extension-image-2.1.14.tgz#5e5b662afd35d6a41ce9fb1fee72da8e3318e2d9"
integrity sha512-EDwbvBIpyJJDAtIlNNBDPtIIAi0GKEAOcqyB7G0tomyho6QUaO2yFtB37t7OAbM+CQiTBtO2AuJhOYr3354V4A==
"@tiptap/extension-image@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-image/-/extension-image-2.4.0.tgz#21a18e80ed6bc330cf8ab2ca990a3addb40916c8"
integrity sha512-NIVhRPMO/ONo8OywEd+8zh0Q6Q7EbFHtBxVsvfOKj9KtZkaXQfUO4MzONTyptkvAchTpj9pIzeaEY5fyU87gFA==
"@tiptap/extension-italic@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-italic/-/extension-italic-2.1.14.tgz#2542ee68202307ed01bd5e10b65650e99b6efa4f"
integrity sha512-K+n2ts26HNatX3FZ2pYJTFDuMypDyMP4jQ3T11cU908lUT8gHXHBcgh0OW83SX92asbWxUj8xEdDZczi7Qqbew==
"@tiptap/extension-italic@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-italic/-/extension-italic-2.4.0.tgz#42ab003e04e1e8d825f698914c0e80ac849144f1"
integrity sha512-aaW/L9q+KNHHK+X73MPloHeIsT191n3VLd3xm6uUcFDnUNvzYJ/q65/1ZicdtCaOLvTutxdrEvhbkrVREX6a8g==
"@tiptap/extension-link@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-link/-/extension-link-2.1.14.tgz#aa850dddfaec14afe99bd3ba6695d515eecadc92"
integrity sha512-lfZIBaGGWJaX9tZIsAq5WuWk1cIQVM3takU4F5485eN8aM7Nnw/+Se8uSPZeh3rCbiNg5EeGi/eLEZv/L/TLGQ==
"@tiptap/extension-link@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-link/-/extension-link-2.4.0.tgz#e44edfe2f8d878959bd3ad64fda1b9e232f1f011"
integrity sha512-r3PjT0bjSKAorHAEBPA0icSMOlqALbxVlWU9vAc+Q3ndzt7ht0CTPNewzFF9kjzARABVt1cblXP/2+c0qGzcsg==
dependencies:
linkifyjs "^4.1.0"
"@tiptap/extension-list-item@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-list-item/-/extension-list-item-2.1.14.tgz#f17c3b9cff2a710c3307c0298261c81f2a5d7cdd"
integrity sha512-MpOCf0QnbW0qxW4dB7JRMX7qGortjY8QRl1WBmUGBBN54Q712nfgmUmNJmzNYfRU91PN0afdBVibUSchB4LP3Q==
"@tiptap/extension-list-item@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-list-item/-/extension-list-item-2.4.0.tgz#a97a48850b81e94b9a60cc2aa16e515aa5311456"
integrity sha512-reUVUx+2cI2NIAqMZhlJ9uK/+zvRzm1GTmlU2Wvzwc7AwLN4yemj6mBDsmBLEXAKPvitfLh6EkeHaruOGymQtg==
"@tiptap/extension-ordered-list@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-ordered-list/-/extension-ordered-list-2.1.14.tgz#61d198f146d9c71b3809398263b4093616b2e545"
integrity sha512-XwARMGQbTbBOOvG62T4yH2g8OeoLYVaNTKRbiuhIzYekAN/elnydQahcjjE9/Y2Zq54g0nPdgh0LvsjWNWxr8Q==
"@tiptap/extension-ordered-list@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-ordered-list/-/extension-ordered-list-2.4.0.tgz#6cf82e10d7e7f7cc44156d29b0b71a22dec31612"
integrity sha512-Zo0c9M0aowv+2+jExZiAvhCB83GZMjZsxywmuOrdUbq5EGYKb7q8hDyN3hkrktVHr9UPXdPAYTmLAHztTOHYRA==
"@tiptap/extension-paragraph@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-paragraph/-/extension-paragraph-2.1.14.tgz#ec993235ac4ceca5026c0e821d2d72e9d5e3c4f9"
integrity sha512-iWD1nfMvADrx2pwxlQXu2PDnNghhU2EvAOmNOzGOEzkTaELkPR4CDyr/wEi1ewS9dNhhO8EpP8IYVXzd01r8JA==
"@tiptap/extension-paragraph@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-paragraph/-/extension-paragraph-2.4.0.tgz#5b9aea8775937b327bbe6754be12ae3144fb09ff"
integrity sha512-+yse0Ow67IRwcACd9K/CzBcxlpr9OFnmf0x9uqpaWt1eHck1sJnti6jrw5DVVkyEBHDh/cnkkV49gvctT/NyCw==
"@tiptap/extension-strike@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-strike/-/extension-strike-2.1.14.tgz#8a94de5ec6bf726ff0ef73f998bad9d863d3a65d"
integrity sha512-AcFiyUc2eiL3TM5flvExIi+LjukaGzSKGGuLH1Q9e7T4GkfZu7FaSzjP1+2kvgwGAMJxgm5Ybzvugcf9rrNosA==
"@tiptap/extension-strike@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-strike/-/extension-strike-2.4.0.tgz#f09c4f51f7fed01c356026d7e8d8a1d1f2ac8f18"
integrity sha512-pE1uN/fQPOMS3i+zxPYMmPmI3keubnR6ivwM+KdXWOMnBiHl9N4cNpJgq1n2eUUGKLurC2qrQHpnVyGAwBS6Vg==
"@tiptap/extension-subscript@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-subscript/-/extension-subscript-2.1.14.tgz#c6796abbb9d1654e8ab12c1b97354aac85754695"
integrity sha512-HYKqCkP4ncbHJFXxqafZUUHdL8raKqaw/DJ8Ogmk8luOqaFjgOGcgFRhtWyXCbv/BJCL42/0IGeoM5D4aRo/gg==
"@tiptap/extension-subscript@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-subscript/-/extension-subscript-2.4.0.tgz#58c1d364ed8bd817f66b4a498f7cca09737c39ee"
integrity sha512-exLSmSFmYN6AVww5oyroFL3KCwstT0U+ojvVhRD6DQ+Hc81d++lBKANfsWAcllXjZVGPWeMNdE66bV7oFCtQcQ==
"@tiptap/extension-superscript@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-superscript/-/extension-superscript-2.1.14.tgz#1ce5646d5c5a2bbf009c563de14cdf879156811a"
integrity sha512-L44OToFzSULAM+8wfbDa2oW7fekNvsZXn081x2EcF8lTjJXDfK+3nViNfoSY7OAoZKEIF6HTYOPwFmiDM+ZZXg==
"@tiptap/extension-superscript@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-superscript/-/extension-superscript-2.4.0.tgz#804355cc3d1b991e2e346d33f2bfaee04dbcc770"
integrity sha512-s+GsbbERNQCn/hyaw5/82y3wHQ7o5byc/eFAKYo1p3p5eESlDaHY/xVYPt3CGOX2TJWZalgSFEFqBVdTSI8mUQ==
"@tiptap/extension-table-cell@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-table-cell/-/extension-table-cell-2.1.14.tgz#fddcb425485e9b28ffa9873e79e27dde5ce0bf44"
integrity sha512-4cnT35wA/O33H/UTOJUiZJFe0QjHfz8vOVjvkbeYOxcuo5Ww0Ro1D6RWU70fdAdkFccFJZ5UtqG9RcZi+JkNWA==
"@tiptap/extension-table-cell@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-table-cell/-/extension-table-cell-2.4.0.tgz#048d869acbf6cfbcd31076adf8130ffd679990a7"
integrity sha512-zylResMWLvV17Z6+GEDjvvl+YpJqJhNMyJsZPZNx/72OcNCDN3p2d6RGFwhpnCpdzZDD6LGaIgWaTj9oeg53SA==
"@tiptap/extension-table-header@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-table-header/-/extension-table-header-2.1.14.tgz#540cc2a7312f5bd6b13076eea11be608555dfacc"
integrity sha512-hg57IePDTGcr9GEFY5g201DreuXv3MZYp6TjqDGF/O48Yy2v22X+Baaa/SyW9WKPLPInAXBp2f8/2YSBKTuwLg==
"@tiptap/extension-table-header@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-table-header/-/extension-table-header-2.4.0.tgz#618a86bc5e66149661129b7e8fbe2fd363882c2d"
integrity sha512-FZCOyJHSFsMTCfBh49J1DlwgpUIM5Ivpr57Za8FVvUkk8RKUIOKpNsZqxE+Wrw+2Bvy5H4X7Azb588x0NDqfOQ==
"@tiptap/extension-table-row@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-table-row/-/extension-table-row-2.1.14.tgz#42968b0eca9f45f646d5c922818590d1365271b2"
integrity sha512-TE1qLztFerqKbm+ZkR+4tN24ZI6EFB99bYQ7QUaw5v1ioyL4QfDny2vSePKmjNObmgmFNl8I4hBqpuzYq9CzhQ==
"@tiptap/extension-table-row@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-table-row/-/extension-table-row-2.4.0.tgz#751ecd4ce49ebe1ccdea153f27c3a61e4449cfd4"
integrity sha512-K4FDI4YzyLWZbhIZYYL15uqs6M3QsPZGTpTdkSaxcKMLholcskDSHhJmySxnrjI0+JNAtyIiqlWBfA1/9Zyhng==
"@tiptap/extension-table@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-table/-/extension-table-2.1.14.tgz#c50e403c43fa5623088cf5a2cdf11adf180c63d8"
integrity sha512-e/idEukzXSThGKyRHxIaK3zqwLCaZMvm0Xcv2D8X2rnQTRIWr4ZR1+zCgwysFAUT26aDYfMUfmx2kAmuNNdLsg==
"@tiptap/extension-table@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-table/-/extension-table-2.4.0.tgz#a29bb933a10ddbd9469263df0c7527ae1fa1de00"
integrity sha512-ceIUnPSqVCb+qC0XZSgApoG3dL3MRvWrGl1nIMxEqPgMsD/MP6MsYV1Lx/GmtdUlEEsV1624cGTBiRzeCuWkZA==
"@tiptap/extension-task-item@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-task-item/-/extension-task-item-2.1.14.tgz#6e30bf3e02885e3fa978a94d83c92c80d47d1dc2"
integrity sha512-P5/Z1cARREnvpFa3gGvFMUm++OJ4RBS/9NVfwKmfg4Y71/0ZbLpaYrq4TKSa8Zg/lR1Ybx7Y1T9agmDu5D5S1g==
"@tiptap/extension-task-item@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-task-item/-/extension-task-item-2.4.0.tgz#33227e72fcffdf087446f88cdb7a10feab4c2087"
integrity sha512-x40vdHnmDiBbA2pjWR/92wVGb6jT13Nk2AhRUI/oP/r4ZGKpTypoB7heDnvLBgH0Y5a51dFqU+G1SFFL30u5uA==
"@tiptap/extension-task-list@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-task-list/-/extension-task-list-2.1.14.tgz#4c6d12c9d45673bc0dd4e32031b4afa1fe041049"
integrity sha512-yI5vd6L0UC0aJvujjmzCnYfx9K8FExI/kVHd/+AlxGQwG90+XAfj6Tw93GyIu7DhGRq/Goek1Mt+RC09sw4AHQ==
"@tiptap/extension-task-list@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-task-list/-/extension-task-list-2.4.0.tgz#61a500fe4a89d5c789ad4fb64c8d7eeedfe26b63"
integrity sha512-vmUB3wEJU81QbiHUygBlselQW8YIW8/85UTwANvWx8+KEWyM7EUF4utcm5R2UobIprIcWb4hyVkvW/5iou25gg==
"@tiptap/extension-text@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/extension-text/-/extension-text-2.1.14.tgz#a781c68fe348bdd08730e727f2380295dc190260"
integrity sha512-Z5g+SlWqnK2loIwqkg2LzsVKVCiMyUfDD8IhNJsny0BRbWKFs4SKPCkAcyCxLK2h8Jm/BG6PyfGHsF/2wx7I3Q==
"@tiptap/extension-text@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/extension-text/-/extension-text-2.4.0.tgz#a3a5f45a9856d513e574f24e2c9b6028273f8eb3"
integrity sha512-LV0bvE+VowE8IgLca7pM8ll7quNH+AgEHRbSrsI3SHKDCYB9gTHMjWaAkgkUVaO1u0IfCrjnCLym/PqFKa+vvg==
"@tiptap/pm@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/pm/-/pm-2.1.14.tgz#b504120adfa428e4eb0526d5ea2b4ba5ed55400b"
integrity sha512-UuHqLDFPEPVLk4iopdHFpnn9KPNmbwQ8M0lnDRK1a9ZBheQpdTj6mQYFteYGKdqJpfcbhLHvmYl8nthfzlXGYw==
"@tiptap/pm@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/pm/-/pm-2.4.0.tgz#f6fe81d24569da584658d2e8a3a378aea3619fb3"
integrity sha512-B1HMEqGS4MzIVXnpgRZDLm30mxDWj51LkBT/if1XD+hj5gm8B9Q0c84bhvODX6KIs+c6z+zsY9VkVu8w9Yfgxg==
dependencies:
prosemirror-changeset "^2.2.0"
prosemirror-collab "^1.3.0"
prosemirror-commands "^1.3.1"
prosemirror-dropcursor "^1.5.0"
prosemirror-gapcursor "^1.3.1"
prosemirror-history "^1.3.0"
prosemirror-inputrules "^1.2.0"
prosemirror-keymap "^1.2.0"
prosemirror-markdown "^1.10.1"
prosemirror-menu "^1.2.1"
prosemirror-model "^1.18.1"
prosemirror-schema-basic "^1.2.0"
prosemirror-schema-list "^1.2.2"
prosemirror-state "^1.4.1"
prosemirror-tables "^1.3.0"
prosemirror-trailing-node "^2.0.2"
prosemirror-transform "^1.7.0"
prosemirror-view "^1.28.2"
prosemirror-changeset "^2.2.1"
prosemirror-collab "^1.3.1"
prosemirror-commands "^1.5.2"
prosemirror-dropcursor "^1.8.1"
prosemirror-gapcursor "^1.3.2"
prosemirror-history "^1.3.2"
prosemirror-inputrules "^1.3.0"
prosemirror-keymap "^1.2.2"
prosemirror-markdown "^1.12.0"
prosemirror-menu "^1.2.4"
prosemirror-model "^1.19.4"
prosemirror-schema-basic "^1.2.2"
prosemirror-schema-list "^1.3.0"
prosemirror-state "^1.4.3"
prosemirror-tables "^1.3.5"
prosemirror-trailing-node "^2.0.7"
prosemirror-transform "^1.8.0"
prosemirror-view "^1.32.7"
"@tiptap/suggestion@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/suggestion/-/suggestion-2.1.14.tgz#baae04da4cb1cb9431f85cc90456185851d5b167"
integrity sha512-8jx+RYY4cZ3ZFmHDm4fPhHN6N8fwIgFnB6iBTbEh5Ra+0Bvh1q+Ek21+Ni92ORjmYz9Vy1e5xxJMyGNywRS5dw==
"@tiptap/suggestion@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/suggestion/-/suggestion-2.4.0.tgz#1926cde5f197d116baf7794f55bd971245540e5c"
integrity sha512-6dCkjbL8vIzcLWtS6RCBx0jlYPKf2Beuyq5nNLrDDZZuyJow5qJAY0eGu6Xomp9z0WDK/BYOxT4hHNoGMDkoAg==
"@tiptap/vue-2@^2.1.14":
version "2.1.14"
resolved "https://registry.yarnpkg.com/@tiptap/vue-2/-/vue-2-2.1.14.tgz#59acb020a0ace9946d6f78572ae8a7629826b934"
integrity sha512-7kisS73LLnFlmRe0T7o/FBlZghaBsZDty1qfAuS+TrhgW1o5wvCtm8Ogoomai2ExWK6Lh8k7E3nc2lDS03s/aw==
"@tiptap/vue-2@^2.4.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@tiptap/vue-2/-/vue-2-2.4.0.tgz#ce5fcc9e8011782c9a92be1b3b51524523226c47"
integrity sha512-1XMkPAbzn3m5moP2D8IoYP2Kl4bkC3WWa38TJTEtJf8YOgia5nakHct+c7QAjN37j0TArbPa5MxXddKp/CqyRA==
dependencies:
"@tiptap/extension-bubble-menu" "^2.1.14"
"@tiptap/extension-floating-menu" "^2.1.14"
"@tiptap/extension-bubble-menu" "^2.4.0"
"@tiptap/extension-floating-menu" "^2.4.0"
vue-ts-types "^1.6.0"
"@tootallnate/once@2":
@ -3126,16 +3091,6 @@
resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e"
integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==
"@types/object.omit@^3.0.0":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@types/object.omit/-/object.omit-3.0.0.tgz#0d31e1208eac8fe2ad5c9499a1016a8273bbfafc"
integrity sha512-I27IoPpH250TUzc9FzXd0P1BV/BMJuzqD3jOz98ehf9dQqGkxlq+hO1bIqZGWqCg5bVOy0g4AUVJtnxe0klDmw==
"@types/object.pick@^1.3.1":
version "1.3.2"
resolved "https://registry.yarnpkg.com/@types/object.pick/-/object.pick-1.3.2.tgz#9eb28118240ad8f658b9c9c6caf35359fdb37150"
integrity sha512-sn7L+qQ6RLPdXRoiaE7bZ/Ek+o4uICma/lBFPyJEKDTPTBP1W8u0c4baj3EiS4DiqLs+Hk+KUGvMVJtAw3ePJg==
"@types/parse5@^5":
version "5.0.0"
resolved "https://registry.yarnpkg.com/@types/parse5/-/parse5-5.0.0.tgz#9ae2106efc443d7c1e26570aa8247828c9c80f11"
@ -3210,11 +3165,6 @@
resolved "https://registry.yarnpkg.com/@types/strip-json-comments/-/strip-json-comments-0.0.30.tgz#9aa30c04db212a9a0649d6ae6fd50accc40748a1"
integrity sha512-7NQmHra/JILCd1QqpSzl8+mJRc8ZHz3uDm8YV1Ks9IhK0epEiTw8aIErbvH9PI+6XbqhyIQy3462nEsn7UVzjQ==
"@types/throttle-debounce@^2.1.0":
version "2.1.0"
resolved "https://registry.yarnpkg.com/@types/throttle-debounce/-/throttle-debounce-2.1.0.tgz#1c3df624bfc4b62f992d3012b84c56d41eab3776"
integrity sha512-5eQEtSCoESnh2FsiLTxE121IiE60hnMqcb435fShf4bpLRjEu1Eoekht23y6zXS9Ts3l+Szu3TARnTsA0GkOkQ==
"@types/tough-cookie@*":
version "4.0.2"
resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.2.tgz#6286b4c7228d58ab7866d19716f3696e03a09397"
@ -4646,11 +4596,6 @@ canvas-confetti@^1.4.0:
resolved "https://registry.yarnpkg.com/canvas-confetti/-/canvas-confetti-1.4.0.tgz#840f6db4a566f8f32abe28c00dcd82acf39c92bd"
integrity sha512-S18o4Y9PqI/uabdlT/jI3MY7XBJjNxnfapFIkjkMwpz6qNxLFZOm2b22OMf4ZYDL9lpNWI+Ih4fEMVPwO1KHFQ==
case-anything@^2.1.10:
version "2.1.10"
resolved "https://registry.yarnpkg.com/case-anything/-/case-anything-2.1.10.tgz#d18a6ca968d54ec3421df71e3e190f3bced23410"
integrity sha512-JczJwVrCP0jPKh05McyVsuOg6AYosrB9XWZKbQzXeDAm2ClE/PJE/BcrrQrVyGYH7Jg8V/LDupmyL4kFlVsVFQ==
ccount@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/ccount/-/ccount-2.0.1.tgz#17a3bf82302e0870d6da43a01311a8bc02a3ecf5"
@ -5850,11 +5795,6 @@ dagre-d3-es@7.0.10:
d3 "^7.8.2"
lodash-es "^4.17.21"
dash-get@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/dash-get/-/dash-get-1.0.2.tgz#4c9e9ad5ef04c4bf9d3c9a451f6f7997298dcc7c"
integrity sha512-4FbVrHDwfOASx7uQVxeiCTo7ggSdYZbqs8lH+WU6ViypPlDbe9y6IP5VVUDQBv9DcnyaiPT5XT0UWHgJ64zLeQ==
data-urls@^3.0.1:
version "3.0.2"
resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-3.0.2.tgz#9cf24a477ae22bcef5cd5f6f0bfbc1d2d3be9143"
@ -8406,7 +8346,7 @@ is-extendable@^0.1.0, is-extendable@^0.1.1:
resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89"
integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=
is-extendable@^1.0.0, is-extendable@^1.0.1:
is-extendable@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4"
integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==
@ -9670,11 +9610,6 @@ make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0:
dependencies:
semver "^6.0.0"
make-error@^1.3.6:
version "1.3.6"
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
makeerror@1.0.12:
version "1.0.12"
resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a"
@ -9694,7 +9629,7 @@ map-visit@^1.0.0:
dependencies:
object-visit "^1.0.0"
markdown-it@14.0.0:
markdown-it@14.0.0, markdown-it@^14.0.0:
version "14.0.0"
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-14.0.0.tgz#b4b2ddeb0f925e88d981f84c183b59bac9e3741b"
integrity sha512-seFjF0FIcPt4P9U39Bq1JYblX0KZCjDLFFQPHpL5AzHpqPEKtosxmdq/LTVZnjfH7tjt9BxStm+wXcDBNuYmzw==
@ -10983,13 +10918,6 @@ object.groupby@^1.0.1:
es-abstract "^1.22.1"
get-intrinsic "^1.2.1"
object.omit@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-3.0.0.tgz#0e3edc2fce2ba54df5577ff529f6d97bd8a522af"
integrity sha512-EO+BCv6LJfu+gBIF3ggLicFebFLN5zqzz/WWJlMFfkMyGth+oBkhxzDl0wx2W4GkLzuQs/FsSkXZb2IMWQqmBQ==
dependencies:
is-extendable "^1.0.0"
object.pick@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747"
@ -11629,74 +11557,75 @@ property-information@^6.0.0:
resolved "https://registry.yarnpkg.com/property-information/-/property-information-6.1.1.tgz#5ca85510a3019726cb9afed4197b7b8ac5926a22"
integrity sha512-hrzC564QIl0r0vy4l6MvRLhafmUowhO/O3KgVSoXIbbA2Sz4j8HGpJc6T2cubRVwMwpdiG/vKGfhT4IixmKN9w==
prosemirror-changeset@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/prosemirror-changeset/-/prosemirror-changeset-2.2.0.tgz#22c05da271a118be40d3e339fa2cace789b1254b"
integrity sha512-QM7ohGtkpVpwVGmFb8wqVhaz9+6IUXcIQBGZ81YNAKYuHiFJ1ShvSzab4pKqTinJhwciZbrtBEk/2WsqSt2PYg==
prosemirror-changeset@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/prosemirror-changeset/-/prosemirror-changeset-2.2.1.tgz#dae94b63aec618fac7bb9061648e6e2a79988383"
integrity sha512-J7msc6wbxB4ekDFj+n9gTW/jav/p53kdlivvuppHsrZXCaQdVgRghoZbSS3kwrRyAstRVQ4/+u5k7YfLgkkQvQ==
dependencies:
prosemirror-transform "^1.0.0"
prosemirror-collab@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/prosemirror-collab/-/prosemirror-collab-1.3.0.tgz#601d33473bf72e6c43041a54b860c84c60b37769"
integrity sha512-+S/IJ69G2cUu2IM5b3PBekuxs94HO1CxJIWOFrLQXUaUDKL/JfBx+QcH31ldBlBXyDEUl+k3Vltfi1E1MKp2mA==
prosemirror-collab@^1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/prosemirror-collab/-/prosemirror-collab-1.3.1.tgz#0e8c91e76e009b53457eb3b3051fb68dad029a33"
integrity sha512-4SnynYR9TTYaQVXd/ieUvsVV4PDMBzrq2xPUWutHivDuOshZXqQ5rGbZM84HEaXKbLdItse7weMGOUdDVcLKEQ==
dependencies:
prosemirror-state "^1.0.0"
prosemirror-commands@^1.0.0, prosemirror-commands@^1.3.1:
version "1.5.0"
resolved "https://registry.yarnpkg.com/prosemirror-commands/-/prosemirror-commands-1.5.0.tgz#d10efece1647c1d984fef6f65d52fdc77785560b"
integrity sha512-zL0Fxbj3fh71GPNHn5YdYgYGX2aU2XLecZYk2ekEF0oOD259HcXtM+96VjPVi5o3h4sGUdDfEEhGiREXW6U+4A==
prosemirror-commands@^1.0.0, prosemirror-commands@^1.5.2:
version "1.5.2"
resolved "https://registry.yarnpkg.com/prosemirror-commands/-/prosemirror-commands-1.5.2.tgz#e94aeea52286f658cd984270de9b4c3fff580852"
integrity sha512-hgLcPaakxH8tu6YvVAaILV2tXYsW3rAdDR8WNkeKGcgeMVQg3/TMhPdVoh7iAmfgVjZGtcOSjKiQaoeKjzd2mQ==
dependencies:
prosemirror-model "^1.0.0"
prosemirror-state "^1.0.0"
prosemirror-transform "^1.0.0"
prosemirror-dropcursor@^1.5.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/prosemirror-dropcursor/-/prosemirror-dropcursor-1.7.0.tgz#a846ba49414dcd99cf8fc8bb26e4f9f24b8f09d0"
integrity sha512-vzab/iPd3CjWILFv6WJz4+BlOwCywOcAGhvY5G/66OYPcaZehN8IVbGtHCV3oyhXk2yAA67nwMv/oNMvBV9k1A==
prosemirror-dropcursor@^1.8.1:
version "1.8.1"
resolved "https://registry.yarnpkg.com/prosemirror-dropcursor/-/prosemirror-dropcursor-1.8.1.tgz#49b9fb2f583e0d0f4021ff87db825faa2be2832d"
integrity sha512-M30WJdJZLyXHi3N8vxN6Zh5O8ZBbQCz0gURTfPmTIBNQ5pxrdU7A58QkNqfa98YEjSAL1HUyyU34f6Pm5xBSGw==
dependencies:
prosemirror-state "^1.0.0"
prosemirror-transform "^1.1.0"
prosemirror-view "^1.1.0"
prosemirror-gapcursor@^1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/prosemirror-gapcursor/-/prosemirror-gapcursor-1.3.1.tgz#8cfd874592e4504d63720e14ed680c7866e64554"
integrity sha512-GKTeE7ZoMsx5uVfc51/ouwMFPq0o8YrZ7Hx4jTF4EeGbXxBveUV8CGv46mSHuBBeXGmvu50guoV2kSnOeZZnUA==
prosemirror-gapcursor@^1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/prosemirror-gapcursor/-/prosemirror-gapcursor-1.3.2.tgz#5fa336b83789c6199a7341c9493587e249215cb4"
integrity sha512-wtjswVBd2vaQRrnYZaBCbyDqr232Ed4p2QPtRIUK5FuqHYKGWkEwl08oQM4Tw7DOR0FsasARV5uJFvMZWxdNxQ==
dependencies:
prosemirror-keymap "^1.0.0"
prosemirror-model "^1.0.0"
prosemirror-state "^1.0.0"
prosemirror-view "^1.0.0"
prosemirror-history@^1.0.0, prosemirror-history@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/prosemirror-history/-/prosemirror-history-1.3.0.tgz#bf5a1ff7759aca759ddf0c722c2fa5b14fb0ddc1"
integrity sha512-qo/9Wn4B/Bq89/YD+eNWFbAytu6dmIM85EhID+fz9Jcl9+DfGEo8TTSrRhP15+fFEoaPqpHSxlvSzSEbmlxlUA==
prosemirror-history@^1.0.0, prosemirror-history@^1.3.2:
version "1.4.0"
resolved "https://registry.yarnpkg.com/prosemirror-history/-/prosemirror-history-1.4.0.tgz#1edbce630aaf21b808e5a5cd798a09976ecb1827"
integrity sha512-UUiGzDVcqo1lovOPdi9YxxUps3oBFWAIYkXLu3Ot+JPv1qzVogRbcizxK3LhHmtaUxclohgiOVesRw5QSlMnbQ==
dependencies:
prosemirror-state "^1.2.2"
prosemirror-transform "^1.0.0"
prosemirror-view "^1.31.0"
rope-sequence "^1.3.0"
prosemirror-inputrules@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/prosemirror-inputrules/-/prosemirror-inputrules-1.2.0.tgz#476dde2dc244050b3aca00cf58a82adfad6749e7"
integrity sha512-eAW/M/NTSSzpCOxfR8Abw6OagdG0MiDAiWHQMQveIsZtoKVYzm0AflSPq/ymqJd56/Su1YPbwy9lM13wgHOFmQ==
prosemirror-inputrules@^1.3.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/prosemirror-inputrules/-/prosemirror-inputrules-1.4.0.tgz#ef1519bb2cb0d1e0cec74bad1a97f1c1555068bb"
integrity sha512-6ygpPRuTJ2lcOXs9JkefieMst63wVJBgHZGl5QOytN7oSZs3Co/BYbc3Yx9zm9H37Bxw8kVzCnDsihsVsL4yEg==
dependencies:
prosemirror-state "^1.0.0"
prosemirror-transform "^1.0.0"
prosemirror-keymap@^1.0.0, prosemirror-keymap@^1.1.2, prosemirror-keymap@^1.2.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/prosemirror-keymap/-/prosemirror-keymap-1.2.1.tgz#3839e7db66cecddae7451f4246e73bdd8489be1d"
integrity sha512-kVK6WGC+83LZwuSJnuCb9PsADQnFZllt94qPP3Rx/vLcOUV65+IbBeH2nS5cFggPyEVJhGkGrgYFRrG250WhHQ==
prosemirror-keymap@^1.0.0, prosemirror-keymap@^1.1.2, prosemirror-keymap@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/prosemirror-keymap/-/prosemirror-keymap-1.2.2.tgz#14a54763a29c7b2704f561088ccf3384d14eb77e"
integrity sha512-EAlXoksqC6Vbocqc0GtzCruZEzYgrn+iiGnNjsJsH4mrnIGex4qbLdWWNza3AW5W36ZRrlBID0eM6bdKH4OStQ==
dependencies:
prosemirror-state "^1.0.0"
w3c-keyname "^2.2.0"
prosemirror-markdown@1.11.2, prosemirror-markdown@^1.10.1:
prosemirror-markdown@1.11.2:
version "1.11.2"
resolved "https://registry.yarnpkg.com/prosemirror-markdown/-/prosemirror-markdown-1.11.2.tgz#f6e529e669d11fa3eec859e93c0d2c91788d6c80"
integrity sha512-Eu5g4WPiCdqDTGhdSsG9N6ZjACQRYrsAkrF9KYfdMaCmjIApH75aVncsWYOJvEk2i1B3i8jZppv3J/tnuHGiUQ==
@ -11704,52 +11633,60 @@ prosemirror-markdown@1.11.2, prosemirror-markdown@^1.10.1:
markdown-it "^13.0.1"
prosemirror-model "^1.0.0"
prosemirror-menu@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/prosemirror-menu/-/prosemirror-menu-1.2.1.tgz#94d99a8547b7ba5680c20e9c497ce19846ce3b2c"
integrity sha512-sBirXxVfHalZO4f1ZS63WzewINK4182+7dOmoMeBkqYO8wqMBvBS7wQuwVOHnkMWPEh0+N0LJ856KYUN+vFkmQ==
prosemirror-markdown@^1.12.0:
version "1.12.0"
resolved "https://registry.yarnpkg.com/prosemirror-markdown/-/prosemirror-markdown-1.12.0.tgz#d2de09d37897abf7adb6293d925ff132dac5b0a6"
integrity sha512-6F5HS8Z0HDYiS2VQDZzfZP6A0s/I0gbkJy8NCzzDMtcsz3qrfqyroMMeoSjAmOhDITyon11NbXSzztfKi+frSQ==
dependencies:
markdown-it "^14.0.0"
prosemirror-model "^1.0.0"
prosemirror-menu@^1.2.4:
version "1.2.4"
resolved "https://registry.yarnpkg.com/prosemirror-menu/-/prosemirror-menu-1.2.4.tgz#3cfdc7c06d10f9fbd1bce29082c498bd11a0a79a"
integrity sha512-S/bXlc0ODQup6aiBbWVsX/eM+xJgCTAfMq/nLqaO5ID/am4wS0tTCIkzwytmao7ypEtjj39i7YbJjAgO20mIqA==
dependencies:
crelt "^1.0.0"
prosemirror-commands "^1.0.0"
prosemirror-history "^1.0.0"
prosemirror-state "^1.0.0"
prosemirror-model@^1.0.0, prosemirror-model@^1.16.0, prosemirror-model@^1.18.1, prosemirror-model@^1.19.0, prosemirror-model@^1.8.1:
version "1.19.0"
resolved "https://registry.yarnpkg.com/prosemirror-model/-/prosemirror-model-1.19.0.tgz#d7ad9a65ada0bb12196f64fe0dd4fc392c841c29"
integrity sha512-/CvFGJnwc41EJSfDkQLly1cAJJJmBpZwwUJtwZPTjY2RqZJfM8HVbCreOY/jti8wTRbVyjagcylyGoeJH/g/3w==
prosemirror-model@^1.0.0, prosemirror-model@^1.19.0, prosemirror-model@^1.19.4, prosemirror-model@^1.20.0, prosemirror-model@^1.21.0, prosemirror-model@^1.8.1:
version "1.21.0"
resolved "https://registry.yarnpkg.com/prosemirror-model/-/prosemirror-model-1.21.0.tgz#2d69ed04b4e7c441c3eb87c1c964fab4f9b217df"
integrity sha512-zLpS1mVCZLA7VTp82P+BfMiYVPcX1/z0Mf3gsjKZtzMWubwn2pN7CceMV0DycjlgE5JeXPR7UF4hJPbBV98oWA==
dependencies:
orderedmap "^2.0.0"
prosemirror-schema-basic@^1.0.0, prosemirror-schema-basic@^1.2.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/prosemirror-schema-basic/-/prosemirror-schema-basic-1.2.1.tgz#a5a137a6399d1a829873332117d2fe8131d291d0"
integrity sha512-vYBdIHsYKSDIqYmPBC7lnwk9DsKn8PnVqK97pMYP5MLEDFqWIX75JiaJTzndBii4bRuNqhC2UfDOfM3FKhlBHg==
prosemirror-schema-basic@^1.0.0, prosemirror-schema-basic@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/prosemirror-schema-basic/-/prosemirror-schema-basic-1.2.2.tgz#6695f5175e4628aab179bf62e5568628b9cfe6c7"
integrity sha512-/dT4JFEGyO7QnNTe9UaKUhjDXbTNkiWTq/N4VpKaF79bBjSExVV2NXmJpcM7z/gD7mbqNjxbmWW5nf1iNSSGnw==
dependencies:
prosemirror-model "^1.19.0"
prosemirror-schema-list@^1.0.0, prosemirror-schema-list@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/prosemirror-schema-list/-/prosemirror-schema-list-1.2.2.tgz#bafda37b72367d39accdcaf6ddf8fb654a16e8e5"
integrity sha512-rd0pqSDp86p0MUMKG903g3I9VmElFkQpkZ2iOd3EOVg1vo5Cst51rAsoE+5IPy0LPXq64eGcCYlW1+JPNxOj2w==
prosemirror-schema-list@^1.0.0, prosemirror-schema-list@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/prosemirror-schema-list/-/prosemirror-schema-list-1.3.0.tgz#05374702cf35a3ba5e7ec31079e355a488d52519"
integrity sha512-Hz/7gM4skaaYfRPNgr421CU4GSwotmEwBVvJh5ltGiffUJwm7C8GfN/Bc6DR1EKEp5pDKhODmdXXyi9uIsZl5A==
dependencies:
prosemirror-model "^1.0.0"
prosemirror-state "^1.0.0"
prosemirror-transform "^1.0.0"
prosemirror-transform "^1.7.3"
prosemirror-state@^1.0.0, prosemirror-state@^1.2.2, prosemirror-state@^1.3.1, prosemirror-state@^1.4.1:
version "1.4.2"
resolved "https://registry.yarnpkg.com/prosemirror-state/-/prosemirror-state-1.4.2.tgz#f93bd8a33a4454efab917ba9b738259d828db7e5"
integrity sha512-puuzLD2mz/oTdfgd8msFbe0A42j5eNudKAAPDB0+QJRw8cO1ygjLmhLrg9RvDpf87Dkd6D4t93qdef00KKNacQ==
prosemirror-state@^1.0.0, prosemirror-state@^1.2.2, prosemirror-state@^1.3.1, prosemirror-state@^1.4.3:
version "1.4.3"
resolved "https://registry.yarnpkg.com/prosemirror-state/-/prosemirror-state-1.4.3.tgz#94aecf3ffd54ec37e87aa7179d13508da181a080"
integrity sha512-goFKORVbvPuAQaXhpbemJFRKJ2aixr+AZMGiquiqKxaucC6hlpHNZHWgz5R7dS4roHiwq9vDctE//CZ++o0W1Q==
dependencies:
prosemirror-model "^1.0.0"
prosemirror-transform "^1.0.0"
prosemirror-view "^1.27.0"
prosemirror-tables@^1.3.0:
version "1.3.2"
resolved "https://registry.yarnpkg.com/prosemirror-tables/-/prosemirror-tables-1.3.2.tgz#ca208c6a55d510af14b652d23e800e00ba6bebd4"
integrity sha512-/9JTeN6s58Zq66HXaxP6uf8PAmc7XXKZFPlOGVtLvxEd6xBP6WtzaJB9wBjiGUzwbdhdMEy7V62yuHqk/3VrnQ==
prosemirror-tables@^1.3.5:
version "1.3.7"
resolved "https://registry.yarnpkg.com/prosemirror-tables/-/prosemirror-tables-1.3.7.tgz#9d296bd432d2bc7dca90f14e5c3b5c5f61277f7a"
integrity sha512-oEwX1wrziuxMtwFvdDWSFHVUWrFJWt929kVVfHvtTi8yvw+5ppxjXZkMG/fuTdFo+3DXyIPSKfid+Be1npKXDA==
dependencies:
prosemirror-keymap "^1.1.2"
prosemirror-model "^1.8.1"
@ -11766,29 +11703,27 @@ prosemirror-test-builder@^1.1.1:
prosemirror-schema-basic "^1.0.0"
prosemirror-schema-list "^1.0.0"
prosemirror-trailing-node@^2.0.2:
version "2.0.3"
resolved "https://registry.yarnpkg.com/prosemirror-trailing-node/-/prosemirror-trailing-node-2.0.3.tgz#213fc0e545a434ff3c37b5218a0de69561bf3892"
integrity sha512-lGrjMrn97KWkjQSW/FjdvnhJmqFACmQIyr6lKYApvHitDnKsCoZz6XzrHB7RZYHni/0NxQmZ01p/2vyK2SkvaA==
prosemirror-trailing-node@^2.0.7:
version "2.0.8"
resolved "https://registry.yarnpkg.com/prosemirror-trailing-node/-/prosemirror-trailing-node-2.0.8.tgz#233ddcbda72de06f9b5d758d2a65a8cac482ea10"
integrity sha512-ujRYhSuhQb1Jsarh1IHqb2KoSnRiD7wAMDGucP35DN7j5af6X7B18PfdPIrbwsPTqIAj0fyOvxbuPsWhNvylmA==
dependencies:
"@babel/runtime" "^7.13.10"
"@remirror/core-constants" "^2.0.0"
"@remirror/core-helpers" "^2.0.1"
"@remirror/core-constants" "^2.0.2"
escape-string-regexp "^4.0.0"
prosemirror-transform@^1.0.0, prosemirror-transform@^1.1.0, prosemirror-transform@^1.2.1, prosemirror-transform@^1.7.0:
version "1.7.1"
resolved "https://registry.yarnpkg.com/prosemirror-transform/-/prosemirror-transform-1.7.1.tgz#b516e818c3add0bdf960f4ca8ccb9d057a3ba21b"
integrity sha512-VteoifAfpt46z0yEt6Fc73A5OID9t/y2QIeR5MgxEwTuitadEunD/V0c9jQW8ziT8pbFM54uTzRLJ/nLuQjMxg==
prosemirror-transform@^1.0.0, prosemirror-transform@^1.1.0, prosemirror-transform@^1.2.1, prosemirror-transform@^1.7.3, prosemirror-transform@^1.8.0:
version "1.9.0"
resolved "https://registry.yarnpkg.com/prosemirror-transform/-/prosemirror-transform-1.9.0.tgz#81fd1fbd887929a95369e6dd3d240c23c19313f8"
integrity sha512-5UXkr1LIRx3jmpXXNKDhv8OyAOeLTGuXNwdVfg8x27uASna/wQkr9p6fD3eupGOi4PLJfbezxTyi/7fSJypXHg==
dependencies:
prosemirror-model "^1.0.0"
prosemirror-model "^1.21.0"
prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, prosemirror-view@^1.13.3, prosemirror-view@^1.27.0, prosemirror-view@^1.28.2:
version "1.30.1"
resolved "https://registry.yarnpkg.com/prosemirror-view/-/prosemirror-view-1.30.1.tgz#7cf0ae8dc8553a02c32961e82eca25079c4d8fc9"
integrity sha512-pZUfr7lICJkEY7XwzldAKrkflZDeIvnbfuu2RIS01N5NwJmR/dfZzDzJRzhb3SM2QtT/bM8b4Nnib8X3MGpAhA==
prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, prosemirror-view@^1.13.3, prosemirror-view@^1.27.0, prosemirror-view@^1.31.0, prosemirror-view@^1.32.7:
version "1.33.6"
resolved "https://registry.yarnpkg.com/prosemirror-view/-/prosemirror-view-1.33.6.tgz#85804eb922411af8e300a07f4f376722b15900b9"
integrity sha512-zRLUNgLIQfd8IfGprsXxWTjdA8xEAFJe8cDNrOptj6Mop9sj+BMeVbJvceyAYCm5G2dOdT2prctH7K9dfnpIMw==
dependencies:
prosemirror-model "^1.16.0"
prosemirror-model "^1.20.0"
prosemirror-state "^1.0.0"
prosemirror-transform "^1.1.0"
@ -13814,11 +13749,6 @@ type-fest@^0.8.1:
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
type-fest@^2.0.0:
version "2.19.0"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b"
integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==
type-is@~1.6.18:
version "1.6.18"
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"