Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent
f2109f5c55
commit
539748dfe7

@@ -8,8 +8,5 @@ MinAlertLevel = suggestion
[*.md]
BasedOnStyles = gitlab_base, gitlab_docs

# Disable the front matter check until we migrate titles to Hugo format
gitlab_docs.FrontMatter = NO

# Ignore SVG markup
TokenIgnores = (\*\*\{\w*\}\*\*)

@@ -1,14 +1,20 @@
import Vue from 'vue';
import { GlDisclosureDropdown } from '@gitlab/ui';

function getMenuItems(container) {
return JSON.parse(container.querySelector('script').textContent);
}

export const OptionsMenuAdapter = {
clicks: {
toggleOptionsMenu(event) {
const button = event.target.closest('.js-options-button');
const menuContainer = button.parentElement;
const items = getMenuItems(menuContainer);

if (!this.sink.optionsMenu) {
this.sink.optionsMenu = new Vue({
el: Vue.version.startsWith('2') ? button : button.parentElement,
el: Vue.version.startsWith('2') ? button : menuContainer,
name: 'GlDisclosureDropdown',
render: (createElement = Vue.h) =>
createElement(GlDisclosureDropdown, {

@@ -18,6 +24,7 @@ export const OptionsMenuAdapter = {
noCaret: true,
category: 'tertiary',
size: 'small',
items,
},
}),
});

@@ -11,6 +11,10 @@
-# * toggle file comments
-# * submodule compare

- view_title = _('View file @ %{commitSha}') % { commitSha: Commit.truncate_sha(@diff_file.content_sha) }
- view_href = project_blob_path(@diff_file.repository.project, helpers.tree_join(@diff_file.content_sha, @diff_file.new_path))
- options_menu_items = [ { "text": "#{view_title}", "href": "#{view_href}" } ].to_json

.rd-diff-file-header{ data: { testid: 'rd-diff-file-header' } }
.rd-diff-file-toggle<
= render Pajamas::ButtonComponent.new(category: :tertiary, size: :small, icon: 'chevron-down', button_options: { class: 'rd-diff-file-toggle-button', data: { opened: '', click: 'toggleFile' }, aria: { label: _('Hide file contents') } })

@@ -51,4 +55,7 @@
%span{ "data-testid" => "js-file-deletion-line" }= @diff_file.removed_lines
.rd-diff-file-options-menu
.js-options-menu
-# <script> here is likely the most effective way to minimize bytes: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/182850#note_2387011092
%script{ type: "application/json" }
= options_menu_items.html_safe
= render Pajamas::ButtonComponent.new(category: :tertiary, size: :small, icon: 'ellipsis_v', button_options: { class: 'js-options-button', data: { click: 'toggleOptionsMenu' }, aria: { label: _('Options') } })

@@ -817,6 +817,14 @@ class Issue < ApplicationRecord
project.autoclose_referenced_issues
end

def epic_work_item?
work_item_type&.epic?
end

def group_epic_work_item?
epic_work_item? && group_level?
end

private

def project_level_readable_by?(user)

@@ -13,6 +13,12 @@ class IssuePolicy < IssuablePolicy
false
end

# Not available in FOSS.
# This method is Overridden in EE
def project_work_item_epics_available?
false
end

# rubocop:disable Cop/UserAdmin -- specifically check the admin attribute
desc "User can read confidential issues"
condition(:can_read_confidential) do

@@ -47,17 +53,22 @@ class IssuePolicy < IssuablePolicy
end
end

# group level issues license for now is equivalent to epics license. We'll have to migrate epics license to
# work items context once epics are fully migrated to work items.
condition(:group_level_issues_license_available) do
epics_license_available?
end

# this is temporarily needed until we rollout implementation of move and clone for all work item types
condition(:supports_move_and_clone, scope: :subject) do
@subject.supports_move_and_clone?
end

condition(:work_item_type_available, scope: :subject) do
if group_issue?
# For now all work item types at group-level require the epics licensed feature
epics_license_available?
elsif @subject.epic_work_item?
project_work_item_epics_available?
else
true
end
end

rule { group_issue & can?(:read_group) }.policy do
enable :create_note
end

@@ -168,9 +179,9 @@ class IssuePolicy < IssuablePolicy
prevent :destroy_issue
end

# IMPORTANT: keep the prevent rules as last rules defined in the policy, as these are based on
# IMPORTANT: keep the prevention rules as last rules defined in the policy, as these are based on
# all abilities defined up to this point.
rule { group_issue & ~group_level_issues_license_available }.policy do
rule { ~work_item_type_available }.policy do
prevent(*::IssuePolicy.ability_map.map.keys)
end
end

@@ -41,13 +41,13 @@ class WorkItemPolicy < IssuePolicy
prevent :delete_work_item
end

rule { can_report_spam }.enable :report_spam

# IMPORTANT: keep the prevent rules as last rules defined in the policy, as these are based on
# all abilities defined up to this point.
rule { group_issue & ~group_level_issues_license_available }.policy do
rule { ~work_item_type_available }.policy do
prevent(*::WorkItemPolicy.ability_map.map.keys)
end

rule { can_report_spam }.enable :report_spam
end

WorkItemPolicy.prepend_mod

@@ -5,7 +5,10 @@ module Import
class CreateService
include Services::ReturnServiceResponses

def initialize(source_user:, access_level:, expires_at: nil, group: nil, project: nil)
def initialize(
source_user:, access_level:, expires_at: nil, group: nil, project: nil,
ignore_duplicate_errors: false)
@ignore_duplicate_errors = ignore_duplicate_errors
@reference = Import::Placeholders::Membership.new(
source_user: source_user,
namespace_id: source_user.namespace_id,

@@ -19,12 +22,33 @@ module Import
def execute
return success(reference: reference) if reference.save

if ignore_duplicate_errors?(reference)
log_duplicate_membership
return success(reference: reference)
end

error(reference.errors.full_messages, :bad_request)
end

private

attr_reader :reference
attr_reader :reference, :ignore_duplicate_errors

def ignore_duplicate_errors?(reference)
ignore_duplicate_errors && (reference.errors.of_kind?(:project_id, :taken) ||
reference.errors.of_kind?(:group_id, :taken))
end

def log_duplicate_membership
logger.info(
message: 'Project or group has already been taken. Skipping placeholder membership creation',
reference: reference
)
end

def logger
@logger ||= ::Import::Framework::Logger.build
end
end
end
end

@@ -6,7 +6,7 @@ class CreateAiCodeSuggestionEvents < Gitlab::Database::Migration[2.2]

def up
# rubocop:disable Migration/Datetime -- "timestamp" is a column name
create_table :ai_code_suggestion_events, # rubocop:disable Migration/EnsureFactoryForTable -- code_suggestion_event
create_table :ai_code_suggestion_events,
options: 'PARTITION BY RANGE (timestamp)',
primary_key: [:id, :timestamp] do |t|
t.bigserial :id, null: false

@@ -37,13 +37,18 @@ script: |
hasError = true
}

// First check if we have a title key at all
hasTitleKey := text.re_match("(?m)^[tT]itle:", frontmatter)
// Then check if it has content (anything but whitespace) after the colon
hasValidTitle := text.re_match("(?m)^[tT]itle:[^\\n]*[^\\s][^\\n]*$", frontmatter)
// Check if the page has redirect_to (these pages don't need titles)
hasRedirectTo := text.re_match("(?m)^redirect_to:", frontmatter)

if !hasError && (!hasTitleKey || !hasValidTitle) {
hasError = true
if !hasRedirectTo {
// First check if we have a title key at all
hasTitleKey := text.re_match("(?m)^[tT]itle:", frontmatter)
// Then check if it has content (anything but whitespace) after the colon
hasValidTitle := text.re_match("(?m)^[tT]itle:[^\\n]*[^\\s][^\\n]*$", frontmatter)

if !hasError && (!hasTitleKey || !hasValidTitle) {
hasError = true
}
}
}

@@ -451,10 +451,271 @@ A query is built using:

New scopes must create a new query builder class that inherits from `Search::Elastic::QueryBuilder`.

#### Filters
#### Understanding Queries vs Filters

The filters below may be used to build Elasticsearch queries. To use a filter, the index must
have the required fields in the mapping. Filters use the `options` hash to build JSON which is added to the `query_hash`
Queries in Elasticsearch serve two key purposes: filtering documents and calculating relevance scores. When building
search functionality:

- **Queries** are essential when relevance scoring is required to rank results by how well they match search criteria.
  They use the Boolean query's `must`, `should`, and `must_not` clauses, all of which influence the document's final
  relevance score.

- **Filters** (within query context) determine whether documents appear in search results without affecting their score.
  For search operations where results only need to be included/excluded without ranking by relevance, using filters
  alone is more efficient and performs better at scale.

Choose the appropriate approach based on your search requirements - use queries with scoring clauses for ranked results,
and rely on filters for simple inclusion/exclusion logic.
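
For illustration only (this sketch is not part of the query builder framework itself), the same condition can be expressed as a scoring clause or as a filter. The `title` and `state` fields below are placeholders rather than a real GitLab mapping:

```ruby
# Sketch: the same kind of clause, scored versus unscored.
scored_query = {
  query: {
    bool: {
      # `must` contributes to the relevance score, so results are ranked.
      must: [{ match: { title: { query: 'rate limiting' } } }]
    }
  }
}

unscored_query = {
  query: {
    bool: {
      # `filter` only includes or excludes documents; no score is calculated.
      filter: [{ term: { state: 'opened' } }]
    }
  }
}
```

Moving a clause from `must` into `filter` returns the same documents without ranking them, which is usually cheaper when ordering does not matter.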

#### Available Queries

All query builders must return a standardized `query_hash` structure that conforms to Elasticsearch's Boolean query
syntax. The `Search::Elastic::BoolExpr` class provides an interface for constructing Boolean queries.

The required query hash structure is:

```json
{
  "query": {
    "bool": {
      "must": [],
      "must_not": [],
      "should": [],
      "filters": [],
      "minimum_should_match": null
    }
  }
}
```

##### `by_iid`

Query by `iid` field and document type. Requires `type` and `iid` fields.

```json
{
  "query": {
    "bool": {
      "filter": [
        {
          "term": {
            "iid": {
              "_name": "milestone:related:iid",
              "value": 1
            }
          }
        },
        {
          "term": {
            "type": {
              "_name": "doc:is_a:milestone",
              "value": "milestone"
            }
          }
        }
      ]
    }
  }
}
```

##### `by_full_text`

Performs a full text search. This query will use `by_multi_match_query` or `by_simple_query_string` if Advanced search syntax is used in the query string. `by_multi_match_query` is behind the `search_uses_match_queries` feature flag.

##### `by_multi_match_query`

Uses `multi_match` Elasticsearch API. Can be customized with the following options:

- `count_only` - uses the Boolean query clause `filter`. Scoring and highlighting are not performed.
- `query` - if no query is passed, uses `match_all` Elasticsearch API
- `keyword_match_clause` - if `:should` is passed, uses the Boolean query clause `should`. Default: `must` clause

```json
{
  "query": {
    "bool": {
      "must": [
        {
          "bool": {
            "must": [],
            "must_not": [],
            "should": [
              {
                "multi_match": {
                  "_name": "project:multi_match:and:search_terms",
                  "fields": [
                    "name^10",
                    "name_with_namespace^2",
                    "path_with_namespace",
                    "path^9",
                    "description"
                  ],
                  "query": "search",
                  "operator": "and",
                  "lenient": true
                }
              },
              {
                "multi_match": {
                  "_name": "project:multi_match_phrase:search_terms",
                  "type": "phrase",
                  "fields": [
                    "name^10",
                    "name_with_namespace^2",
                    "path_with_namespace",
                    "path^9",
                    "description"
                  ],
                  "query": "search",
                  "lenient": true
                }
              }
            ],
            "filter": [],
            "minimum_should_match": 1
          }
        }
      ],
      "must_not": [],
      "should": [],
      "filter": [],
      "minimum_should_match": null
    }
  }
}
```

##### `by_simple_query_string`

Uses `simple_query_string` Elasticsearch API. Can be customized with the following options:

- `count_only` - uses the Boolean query clause `filter`. Scoring and highlighting are not performed.
- `query` - if no query is passed, uses `match_all` Elasticsearch API
- `keyword_match_clause` - if `:should` is passed, uses the Boolean query clause `should`. Default: `must` clause

```json
{
  "query": {
    "bool": {
      "must": [
        {
          "simple_query_string": {
            "_name": "project:match:search_terms",
            "fields": [
              "name^10",
              "name_with_namespace^2",
              "path_with_namespace",
              "path^9",
              "description"
            ],
            "query": "search",
            "lenient": true,
            "default_operator": "and"
          }
        }
      ],
      "must_not": [],
      "should": [],
      "filter": [],
      "minimum_should_match": null
    }
  }
}
```

##### `by_knn`

Requires options: `vectors_supported` (set to `:elasticsearch` or `:opensearch`) and `embedding_field`. Callers may optionally provide options: `embeddings`

Performs a hybrid search using embeddings. Uses `full_text_search` unless embeddings are supported.

{{< alert type="warning" >}}

Elasticsearch and OpenSearch DSL for `knn` queries is different. To support both, this query must be used with the `by_knn` filter.

{{< /alert >}}

The example below is for Elasticsearch.

```json
{
  "query": {
    "bool": {
      "must": [
        {
          "bool": {
            "must": [],
            "must_not": [],
            "should": [
              {
                "multi_match": {
                  "_name": "work_item:multi_match:and:search_terms",
                  "fields": [
                    "iid^50",
                    "title^2",
                    "description"
                  ],
                  "query": "test",
                  "operator": "and",
                  "lenient": true
                }
              },
              {
                "multi_match": {
                  "_name": "work_item:multi_match_phrase:search_terms",
                  "type": "phrase",
                  "fields": [
                    "iid^50",
                    "title^2",
                    "description"
                  ],
                  "query": "test",
                  "lenient": true
                }
              }
            ],
            "filter": [],
            "minimum_should_match": 1
          }
        }
      ],
      "must_not": [],
      "should": [],
      "filter": [],
      "minimum_should_match": null
    }
  },
  "knn": {
    "field": "embedding_0",
    "query_vector": [
      0.030752448365092278,
      -0.05360432341694832
    ],
    "boost": 5,
    "k": 25,
    "num_candidates": 100,
    "similarity": 0.6,
    "filter": []
  }
}
```

#### Available Filters

The query builder framework provides a collection of pre-built filters to handle common search scenarios. These filters
simplify the process of constructing complex query conditions without having to write raw Elasticsearch query DSL.

#### Filter Requirements and Usage

To use any filter:

1. The index mapping must include all required fields specified in each filter's documentation
1. Pass the appropriate parameters via the `options` hash when calling the filter
1. Each filter will generate the appropriate JSON structure and add it to your `query_hash`

Filters can be composed together to create sophisticated search queries while maintaining readable and maintainable
code. The following sections detail each available filter, its required fields, supported options, and example output.

##### `by_type`

@@ -1184,6 +1445,57 @@ Examples are shown for a logged in user. The JSON may be different for users wit
]
```

##### `by_knn`

Requires options: `vectors_supported` (set to `:elasticsearch` or `:opensearch`) and `embedding_field`. Callers may optionally provide options: `embeddings`

{{< alert type="warning" >}}

Elasticsearch and OpenSearch DSL for `knn` queries is different. To support both, this filter must be used with the
`by_knn` query.

{{< /alert >}}

### Creating a filter

Filters are essential components in building effective Elasticsearch queries. They help narrow down search results
without affecting the relevance scoring.

- All filters must be documented.
- Filters are created as class level methods in `Search::Elastic::Filters`
- The method should start with `by_`.
- The method must take `query_hash` and `options` parameters only.
- `query_hash` is expected to contain a hash with this format.

  ```json
  { "query":
    { "bool":
      {
        "must": [],
        "must_not": [],
        "should": [],
        "filters": [],
        "minimum_should_match": null
      }
    }
  }
  ```

- Use `add_filter` to add the filter to the query hash. Filters should add to the `filters` to avoid calculating score. The score calculation is done by the query itself.
- Use `context.name(:filters)` around the filter to add a name to the filter. This helps identify which part of a query and filter have allowed a result to be returned by the search

```ruby
def by_new_filter_type(query_hash:, options:)
  filter_selected_value = options[:field_value]

  context.name(:filters) do
    add_filter(query_hash, :query, :bool, :filter) do
      { term: { field_name: { _name: context.name(:field_name), value: filter_selected_value } } }
    end
  end
end
```
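
For example, if this filter ran against an otherwise empty `query_hash` with `options[:field_value]` set to `'opened'`, the result would look roughly like the hash below. This is a sketch only: the exact `_name` produced by `context.name` depends on the surrounding query context, so the value shown here is illustrative.

```ruby
# Approximate shape of the query_hash after by_new_filter_type runs.
{
  query: {
    bool: {
      must: [],
      must_not: [],
      should: [],
      # Added by the filter: a named term clause that narrows the results
      # without contributing to the relevance score.
      filter: [
        { term: { field_name: { _name: 'filters:field_name', value: 'opened' } } }
      ],
      minimum_should_match: nil
    }
  }
}
```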

### Sending queries to Elasticsearch

The queries are sent to `::Gitlab::Search::Client` from `Gitlab::Elastic::SearchResults`.

@@ -1192,7 +1504,7 @@ the response from Elasticsearch.

#### Model requirements

The model must response to the `to_ability_name` method so that the redaction logic can check if it has
The model must respond to the `to_ability_name` method so that the redaction logic can check if it has
`Ability.allowed?(current_user, :"read_#{object.to_ability_name}", object)?`. The method must be added if
it does not exist.
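
A minimal sketch of what that can look like, using a hypothetical model (the class and ability names are illustrative only):

```ruby
# Hypothetical model shown only to illustrate the requirement.
class MyNewSearchableRecord < ApplicationRecord
  # The redaction logic checks Ability.allowed?(current_user, :"read_#{to_ability_name}", record),
  # so this value must match the read ability defined in the corresponding policy
  # (for example, :read_my_new_searchable_record).
  def to_ability_name
    'my_new_searchable_record'
  end
end
```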

@@ -2,11 +2,9 @@
stage: none
group: unassigned
info: Any user with at least the Maintainer role can merge updates to this content. For details, see https://docs.gitlab.com/ee/development/development_processes.html#development-guidelines-review.
title: Data Retention Guidelines for Feature Development
---

# Data Retention Guidelines for Feature Development

## Overview

Data retention is a critical aspect of feature development at GitLab. As we build and maintain features, we must consider the lifecycle of the data we collect and store. This document outlines the guidelines for incorporating data retention considerations into feature development from the outset.

@@ -92,7 +92,7 @@ test:

build:
  stage: build
  image: node:18
  image: node:22
  script:
    - npm ci
    - npm run build

@@ -43,17 +43,17 @@ If your organization is facing any of the following challenges, a DevSecOps appr

<!-- Do not delete the double spaces at the end of these lines. They improve the rendered view. -->

- **Development, security, and operations teams are siloed.**
- **Development, security, and operations teams are siloed.**
  If development and operations are isolated from security issues,
  they can't build secure software. And if security teams aren't part of the development process,
  they can't identify risks proactively. DevSecOps brings teams together to improve workflows
  and share ideas. Organizations might even see improved employee morale and retention.

- **Long development cycles are making it difficult to meet customer or stakeholder demands.**
- **Long development cycles are making it difficult to meet customer or stakeholder demands.**
  One reason for the struggle could be security. DevSecOps implements security at every step of
  the development lifecycle, meaning that solid security doesn't require the whole process to come to a halt.

- **You're migrating to the cloud (or considering it).**
- **You're migrating to the cloud (or considering it).**
  Moving to the cloud often means bringing on new development processes, tools, and systems.
  It's a great time to make processes faster and more secure — and DevSecOps could make that a lot easier.

@@ -16,6 +16,7 @@ title: Vulnerabilities in a pipeline

- [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/490334) in GitLab 17.9 [with a flag](../../../administration/feature_flags.md) named `dependency_scanning_for_pipelines_with_cyclonedx_reports`. Disabled by default.
- [Enabled on GitLab.com and GitLab Self-Managed](https://gitlab.com/gitlab-org/gitlab/-/issues/490332) in GitLab 17.9.
- Feature flag `dependency_scanning_for_pipelines_with_cyclonedx_reports` removed in 17.10.

{{< /history >}}

@@ -19,8 +19,14 @@ After your organization purchases a subscription, an administrator must assign s
You likely received an email that notified you of your seat.

The AI-powered features you have access to use language models to help streamline
your workflow. If you're on GitLab Self-Managed, your administrator can choose to use
GitLab models, or self-host their own models.
your workflow:

- If you're on GitLab.com, you use default GitLab AI vendor models.
- If you're on GitLab Self-Managed, your administrator can either:
  - [Use default GitLab AI vendor models](../gitlab_duo/setup.md).
  - Self-host the AI gateway and language models with
    [GitLab Duo Self-Hosted](../../administration/gitlab_duo_self_hosted/_index.md)
    and choose from among supported models.

If you have issues accessing GitLab Duo features, ask your administrator.
They can check the health of the installation.

@@ -30,7 +36,8 @@ For more information, see:
- [Assign seats to users](../../subscriptions/subscription-add-ons.md#assign-gitlab-duo-seats).
- [Features included in Duo Pro and Duo Enterprise](https://about.gitlab.com/gitlab-duo/#pricing).
- [List of GitLab Duo features and their language models](../gitlab_duo/_index.md).
- [Self-hosted models](../../administration/gitlab_duo_self_hosted/_index.md).
- [GitLab Duo Self-Hosted](../../administration/gitlab_duo_self_hosted/_index.md).
- [GitLab Duo features supported by GitLab Duo Self-Hosted](../../administration/gitlab_duo_self_hosted/_index.md#supported-gitlab-duo-features).
- [Health check details](../gitlab_duo/setup.md#run-a-health-check-for-gitlab-duo).

## Step 2: Try GitLab Duo Chat in the UI

@@ -27,6 +27,22 @@ GitLab is [transparent](https://handbook.gitlab.com/handbook/values/#transparenc
As GitLab Duo features mature, the documentation will be updated to clearly state
how and where you can access these features.

## Implementing GitLab Duo

You can implement GitLab Duo in the following ways:

- On GitLab.com, use the default GitLab AI vendor models and the cloud-based AI
  gateway that is hosted by GitLab.
- On GitLab Self-Managed:

  - [Use the default option of GitLab AI vendor models and the cloud-based AI gateway that is hosted by GitLab](setup.md).
  - Use [GitLab Duo Self-Hosted](../../administration/gitlab_duo_self_hosted/_index.md)
    to self-host the AI gateway and language models. You can use GitLab AI vendor models or a
    supported language model. This provides full control over your data and
    security. GitLab Duo Code Suggestions and Chat are supported.
    <i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Watch overview](https://youtu.be/TQoO3sFnb28?si=w_gFAYLYIzPEbhEl)
    <!-- Video published on 2025-02-20 -->

## Working across the entire software development lifecycle

To improve your workflow across the entire software development lifecycle, try these features:

@@ -35,10 +51,6 @@ To improve your workflow across the entire software development lifecycle, try t
and learn about GitLab by asking your questions in a chat window.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Watch overview](https://www.youtube.com/watch?v=ZQBAuf-CTAY)
<!-- Video published on 2024-04-18 -->
- [GitLab Duo Self-Hosted](../../administration/gitlab_duo_self_hosted/_index.md): Host the language models that power AI features in GitLab.
<i class="fa fa-youtube-play youtube" aria-hidden="true"></i> [Watch overview](https://youtu.be/TQoO3sFnb28?si=w_gFAYLYIzPEbhEl)
<!-- Video published on 2025-02-20 -->
Code Suggestions and Chat are supported. Use GitLab model vendors or self-host a supported language model.
- [GitLab Duo Workflow](../duo_workflow/_index.md): Automate tasks and help increase productivity in your development workflow.
- [AI Impact Dashboard](../analytics/ai_impact_analytics.md): Measure the AI effectiveness and impact on SDLC metrics.

@@ -3,10 +3,9 @@ stage: none
group: Tutorials
description: Tutorial on how to create a shop application in Python with GitLab Duo.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
title: 'Tutorial: Use GitLab Duo to create a shop application in Python'
---

# Tutorial: Use GitLab Duo to create a shop application in Python

<!-- vale gitlab_base.FutureTense = NO -->

You have been hired as a developer at an online bookstore. The current system for

@@ -62,7 +62,7 @@ module BulkImports
end

def create_placeholder_membership(data)
result = Import::PlaceholderMemberships::CreateService.new(**data).execute
result = Import::PlaceholderMemberships::CreateService.new(**data, ignore_duplicate_errors: true).execute

return unless result.error?

@@ -3,6 +3,8 @@ module Gitlab
module Database
module Partitioning
class DetachedPartitionDropper
PROCESSING_DELAY = 1.minute

def perform
Gitlab::AppLogger.info(message: "Checking for previously detached partitions to drop")

@@ -12,6 +14,8 @@ module Gitlab
else
drop_partition(detached_partition)
end

sleep(PROCESSING_DELAY)
rescue StandardError => e
Gitlab::AppLogger.error(message: "Failed to drop previously detached partition",
partition_name: detached_partition.table_name,

@@ -1,6 +1,6 @@
# Usage
# 1. Install requirements:
# pip install requests langchain langchain_text_splitter
# pip install requests langchain langchain_text_splitters
# 2. Run the script:
# GLAB_TOKEN=<api_token> python3 scripts/custom_models/create_index.py --version_tag="v17.0.0"

@@ -2,12 +2,16 @@ import { DiffFile } from '~/rapid_diffs/diff_file';
import { OptionsMenuAdapter } from '~/rapid_diffs/options_menu/adapter';

describe('Diff File Options Menu', () => {
const item1 = { text: 'item 1', path: 'item/1/path' };
const html = `
<diff-file data-viewer="any">
<div class="rd-diff-file">
<div class="rd-diff-file-header" data-testid="rd-diff-file-header">
<div class="rd-diff-file-options-menu gl-ml-2">
<div class="js-options-menu">
<script type="application/json">
[{"text": "${item1.text}", "href": "${item1.path}"}]
</script>
<button class="js-options-button" data-click="toggleOptionsMenu" type="button"></button>
</div>
</div>

@@ -23,6 +27,8 @@ describe('Diff File Options Menu', () => {
container: () => get('file').querySelector('.js-options-menu'),
serverButton: () => get('container').querySelector('.js-options-button'),
vueButton: () => get('container').querySelector('[data-testid="base-dropdown-toggle"]'),
menuItems: () =>
get('container').querySelectorAll('[data-testid="disclosure-dropdown-item"]'),
};

return elements[element]?.();

@@ -61,4 +67,16 @@ describe('Diff File Options Menu', () => {
*/
expect(get('serverButton')).toBeNull();
});

it('renders the correct menu items in the GlDisclosureDropdown as provided by the back end', () => {
const button = get('serverButton');

button.click();

const items = Array.from(get('menuItems'));

expect(items.length).toBe(1);
expect(items[0].textContent.trim()).toBe(item1.text);
expect(items[0].querySelector('a').getAttribute('href')).toBe(item1.path);
});
});

@@ -309,6 +309,7 @@ RSpec.describe BulkImports::Common::Pipelines::MembersPipeline, feature_category
source_user: source_user,
access_level: 30,
expires_at: '2020-01-01T00:00:00Z',
ignore_duplicate_errors: true,
group: portable.is_a?(Group) ? portable : nil,
project: portable.is_a?(Project) ? portable : nil) do |service|
expect(service).to receive(:execute).and_return(ServiceResponse.success)

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper, feature_category: :database do
include Database::TableSchemaHelpers

subject(:dropper) { described_class.new }

@@ -24,6 +24,7 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
end

before do
stub_const("#{described_class}::PROCESSING_DELAY", 0.1)
connection.execute(<<~SQL)
CREATE TABLE _test_referenced_table (
id bigserial primary key not null

@@ -239,5 +240,34 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do
end
end
end

context 'processes partitions with a delay' do
before do
create_partition(
name: :_test_partition_1,
from: 3.months.ago,
to: 2.months.ago,
attached: false,
drop_after: 1.second.ago
)

create_partition(
name: :_test_partition_2,
from: 2.months.ago,
to: 1.month.ago,
attached: false,
drop_after: 1.second.ago
)
end

it 'waits between processing each partition' do
expect(dropper).to receive(:sleep).with(described_class::PROCESSING_DELAY).twice

dropper.perform

expect_partition_removed(:_test_partition_1)
expect_partition_removed(:_test_partition_2)
end
end
end
end

@@ -228,6 +228,7 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
it_behaves_like 'alert bot'
it_behaves_like 'support bot with service desk disabled'
it_behaves_like 'support bot with service desk enabled'
it_behaves_like 'prevents access to project-level {issues|work_items} with type Epic', :issue

context 'with confidential issues' do
let(:confidential_issue) { create(:issue, :confidential, project: project, assignees: [assignee], author: author) }

@@ -546,6 +547,7 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do

it_behaves_like 'alert bot'
it_behaves_like 'support bot with service desk enabled'
it_behaves_like 'prevents access to project-level {issues|work_items} with type Epic', :issue

context 'when issues are private' do
before_all do

@@ -11,11 +11,15 @@ RSpec.describe WorkItemPolicy, :aggregate_failures, feature_category: :team_plan

let_it_be(:admin) { create(:user, :admin) }
let_it_be(:non_member_user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:assignee) { create(:user) }
let_it_be(:support_bot) { Users::Internal.support_bot }

let_it_be(:guest) { create(:user, guest_of: [private_project, public_project]) }
let_it_be(:guest_author) { create(:user, guest_of: [private_project, public_project]) }
let_it_be(:planner) { create(:user, planner_of: [private_project, public_project]) }
let_it_be(:reporter) { create(:user, reporter_of: [private_project, public_project]) }
let_it_be(:owner) { create(:user, owner_of: [private_project, public_project]) }

let_it_be(:group_guest) { create(:user, guest_of: [private_group, public_group]) }
let_it_be(:group_planner) { create(:user, planner_of: [private_group, public_group]) }

@@ -41,6 +45,9 @@ RSpec.describe WorkItemPolicy, :aggregate_failures, feature_category: :team_plan
let_it_be(:incident_work_item) { create(:work_item, :incident, project: private_project) }

it_behaves_like 'checks abilities for project level work items'
it_behaves_like 'prevents access to project-level {issues|work_items} with type Epic', :work_item do
let_it_be(:project) { private_project }
end

it 'checks non-member abilities' do
# disallowed

@@ -77,6 +84,9 @@ RSpec.describe WorkItemPolicy, :aggregate_failures, feature_category: :team_plan
let_it_be(:incident_work_item) { create(:work_item, :incident, project: public_project) }

it_behaves_like 'checks abilities for project level work items'
it_behaves_like 'prevents access to project-level {issues|work_items} with type Epic', :work_item do
let_it_be(:project) { public_project }
end

it 'checks non-member abilities' do
# allowed

@@ -10,6 +10,7 @@ RSpec.describe Import::PlaceholderMemberships::CreateService, feature_category:

let(:access_level) { Gitlab::Access::GUEST }
let(:expires_at) { Date.today.next_month }
let(:ignore_duplicate_errors) { false }

subject(:result) do
described_class.new(

@@ -17,7 +18,8 @@ RSpec.describe Import::PlaceholderMemberships::CreateService, feature_category:
access_level: access_level,
expires_at: expires_at,
group: group,
project: project
project: project,
ignore_duplicate_errors: ignore_duplicate_errors
).execute
end

@@ -36,6 +38,37 @@ RSpec.describe Import::PlaceholderMemberships::CreateService, feature_category:
project: project
)
end

context 'when placeholder membership already exists' do
let_it_be(:existing_membership) do
create(:import_placeholder_membership, source_user: source_user, project: project)
end

context "when ignoring duplicate membership errors" do
let(:ignore_duplicate_errors) { true }

it 'does not create a member, logs information, and returns success' do
expect_next_instance_of(::Import::Framework::Logger) do |logger|
expect(logger).to receive(:info).with(
message: 'Project or group has already been taken. Skipping placeholder membership creation',
reference: be_a(Import::Placeholders::Membership)
)
end

expect { result }.not_to change { Import::Placeholders::Membership.count }
expect(result).to be_success
end
end

context "when observing duplicate membership errors" do
it 'returns an error' do
expect { result }.not_to change { Import::Placeholders::Membership.count }
expect(result).to be_error
expect(result.http_status).to eq(:bad_request)
expect(result.message).to include('Project has already been taken')
end
end
end
end

context 'when group is provided' do

@@ -115,3 +115,50 @@ RSpec.shared_examples 'checks abilities for project level work items' do
.to be_disallowed(:delete_work_item, :summarize_comments)
end
end

RSpec.shared_examples 'prevents access to project-level {issues|work_items} with type Epic' do |factory|
context 'with Epic work item type' do
let_it_be(:with_epic_type) do
create(
factory,
work_item_type: WorkItems::Type.default_by_type(:epic),
project: project,
assignees: [assignee],
author: author
)
end

let(:work_item_permissions) do
%i[delete_work_item set_work_item_metadata admin_work_item_link admin_parent_link move_work_item clone_work_item]
end

let(:issue_permissions) do
%i[
read_cross_project admin_all_resources read_all_resources change_repository_storage resolve_note read_issue
update_issue reopen_issue update_merge_request reopen_merge_request create_note admin_note award_emoji
read_incident_management_timeline_event admin_incident_management_timeline_event create_timelog read_issuable
read_issuable_participables read_note read_internal_note set_note_created_at mark_note_as_internal
reposition_note create_issue admin_issue destroy_issue read_issue_iid read_design create_design update_design
destroy_design move_design create_todo update_subscription set_issue_metadata admin_issue_link
set_confidentiality admin_issue_relation read_crm_contacts set_issue_crm_contacts move_issue clone_issue
read_work_item create_work_item update_work_item admin_work_item destroy_work_item
]
end

let(:abilities) { factory == :work_item ? issue_permissions.append(*work_item_permissions) : issue_permissions }

it "does not allow anonymous any access to the #{factory}" do
expect(permissions(nil, with_epic_type)).to be_disallowed(*abilities)
end

where(role: %w[guest planner reporter owner admin assignee author support_bot])

with_them do
let(:current_user) { public_send(role) }

it "does not allow user any access to the #{factory}" do
expect(permissions(current_user, with_epic_type)).to be_disallowed(*abilities)
end
end
end
end

@@ -53,7 +53,7 @@ RSpec.shared_examples 'cloneable and moveable work item' do
expect(new_work_item).to be_persisted
expect(new_work_item).to have_attributes(original_work_item_attrs)

if new_work_item.work_item_type.epic?
if new_work_item.group_epic_work_item?
expect(new_work_item.reload.sync_object).to be_persisted
expect(new_work_item.sync_object.title).to eq(original_work_item.sync_object.title)
end