+
folder_project_environments_path(@project, @folder, format: :json),
"folder_name" => @folder,
+ "project_path" => project_path(@project),
+ "help_page_path" => help_page_path("ci/environments/index"),
"can_read_environment" => can?(current_user, :read_environment, @project).to_s
}
end
diff --git a/app/workers/concerns/gitlab/bitbucket_server_import/object_importer.rb b/app/workers/concerns/gitlab/bitbucket_server_import/object_importer.rb
index 1090d82c922..fbcb5d81c8a 100644
--- a/app/workers/concerns/gitlab/bitbucket_server_import/object_importer.rb
+++ b/app/workers/concerns/gitlab/bitbucket_server_import/object_importer.rb
@@ -7,6 +7,8 @@ module Gitlab
module ObjectImporter
extend ActiveSupport::Concern
+ FAILED_IMPORT_STATES = %w[canceled failed].freeze
+
included do
include ApplicationWorker
@@ -33,8 +35,10 @@ module Gitlab
return unless project
- if project.import_state&.canceled?
- info(project.id, message: 'project import canceled')
+ import_state = project.import_status
+
+ if FAILED_IMPORT_STATES.include?(import_state)
+ info(project.id, message: "project import #{import_state}")
return
end
diff --git a/config/feature_flags/development/bulk_import_async_references_pipeline.yml b/config/feature_flags/development/bulk_import_async_references_pipeline.yml
deleted file mode 100644
index bd6fc4ee91c..00000000000
--- a/config/feature_flags/development/bulk_import_async_references_pipeline.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: bulk_import_async_references_pipeline
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135806
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/430181
-milestone: '16.7'
-type: development
-group: group::import and integrate
-default_enabled: false
diff --git a/db/post_migrate/20231113164909_drop_index_users_on_accepted_term_id.rb b/db/post_migrate/20231113164909_drop_index_users_on_accepted_term_id.rb
new file mode 100644
index 00000000000..2cbf68efd25
--- /dev/null
+++ b/db/post_migrate/20231113164909_drop_index_users_on_accepted_term_id.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+class DropIndexUsersOnAcceptedTermId < Gitlab::Database::Migration[2.2]
+ milestone '16.7'
+ disable_ddl_transaction!
+
+ TABLE_NAME = 'users'
+ INDEX_NAME = 'index_users_on_accepted_term_id'
+ COLUMN = 'accepted_term_id'
+
+ def up
+ remove_concurrent_index_by_name TABLE_NAME, name: INDEX_NAME
+ end
+
+ def down
+ add_concurrent_index TABLE_NAME, COLUMN, name: INDEX_NAME
+ end
+end
diff --git a/db/schema_migrations/20231113164909 b/db/schema_migrations/20231113164909
new file mode 100644
index 00000000000..344fd046b14
--- /dev/null
+++ b/db/schema_migrations/20231113164909
@@ -0,0 +1 @@
+5d4421756749b4ef2cc74213e5e73d6ecec8f47d7db3645941f5d66d162100ac
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index be9eee3f9af..0dc47575f9e 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -34882,8 +34882,6 @@ CREATE INDEX index_users_for_active_billable_users ON users USING btree (id) WHE
CREATE INDEX index_users_for_auditors ON users USING btree (id) WHERE (auditor IS TRUE);
-CREATE INDEX index_users_on_accepted_term_id ON users USING btree (accepted_term_id);
-
CREATE INDEX index_users_on_admin ON users USING btree (admin);
CREATE UNIQUE INDEX index_users_on_confirmation_token ON users USING btree (confirmation_token);
diff --git a/doc/administration/admin_area.md b/doc/administration/admin_area.md
index c37f7a5d289..feb874279f5 100644
--- a/doc/administration/admin_area.md
+++ b/doc/administration/admin_area.md
@@ -8,7 +8,7 @@ type: reference
# GitLab Admin Area **(FREE SELF)**
The Admin Area provides a web UI to manage and configure features of GitLab
-self-managed instances. If you are an administrator,to access the Admin Area:
+self-managed instances. If you are an administrator, to access the Admin Area:
- In GitLab 16.1 and later: on the left sidebar, select **Search or go to**, then select **Admin Area**.
- In GitLab 16.0 and earlier: on the top bar, select **Main menu > Admin**.
diff --git a/doc/ci/environments/protected_environments.md b/doc/ci/environments/protected_environments.md
index e594ff725a4..dd72ea0522c 100644
--- a/doc/ci/environments/protected_environments.md
+++ b/doc/ci/environments/protected_environments.md
@@ -27,6 +27,7 @@ Maintainer role.
Prerequisites:
- When granting the **Allowed to deploy** permission to a group or subgroup, the user configuring the protected environment must be a **direct member** of the group or subgroup to be added. Otherwise, the group or subgroup does not show up in the dropdown list. For more information see [issue #345140](https://gitlab.com/gitlab-org/gitlab/-/issues/345140).
+- When granting **Allowed to deploy** and **Approvers** permissions to a group or project by using the settings UI, only direct members of the group or project receive these permissions. To grant these permissions to inherited members also, [use the API](../../api/protected_environments.md#group-inheritance-types). For more information see [issue #422392](https://gitlab.com/gitlab-org/gitlab/-/issues/422392).
To protect an environment:
diff --git a/doc/development/database/single_table_inheritance.md b/doc/development/database/single_table_inheritance.md
index 7dbec12c7f6..ecc42a3b5d1 100644
--- a/doc/development/database/single_table_inheritance.md
+++ b/doc/development/database/single_table_inheritance.md
@@ -53,8 +53,13 @@ class Animal < ActiveRecord::Base
def self.inheritance_column = 'species'
end
-class Dog < Animal; end
-class Cat < Animal; end
+class Dog < Animal
+ self.allow_legacy_sti_class = true
+end
+
+class Cat < Animal
+ self.allow_legacy_sti_class = true
+end
```
If your table already has a `*_type`, new classes for the different types can be added as needed.
diff --git a/doc/subscriptions/self_managed/index.md b/doc/subscriptions/self_managed/index.md
index 01e6faf5b78..e5523003a3b 100644
--- a/doc/subscriptions/self_managed/index.md
+++ b/doc/subscriptions/self_managed/index.md
@@ -467,6 +467,10 @@ existing feature requests in the [GitLab](https://gitlab.com/gitlab-org/gitlab/-
These issues are the best avenue for getting updates on specific product plans
and for communicating directly with the relevant GitLab team members.
+## Storage
+
+The amount of storage and transfer for self-managed instances has no application limits. Administrators are responsible for the underlying infrastructure costs and can set [repository size limits](../../administration/settings/account_and_limit_settings.md#repository-size-limit).
+
## Troubleshooting
### Subscription data fails to synchronize
diff --git a/doc/user/application_security/policies/scan-result-policies.md b/doc/user/application_security/policies/scan-result-policies.md
index d73cb73f0ac..94b16d42e88 100644
--- a/doc/user/application_security/policies/scan-result-policies.md
+++ b/doc/user/application_security/policies/scan-result-policies.md
@@ -116,7 +116,7 @@ This rule enforces the defined actions based on security scan findings.
|-------|------|----------|-----------------|-------------|
| `type` | `string` | true | `scan_finding` | The rule's type. |
| `branches` | `array` of `string` | true if `branch_type` field does not exist | `[]` or the branch's name | Applicable only to protected target branches. An empty array, `[]`, applies the rule to all protected target branches. Cannot be used with the `branch_type` field. |
-| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of branches the given policy applies to. Cannot be used with the `branches` field. |
+| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of protected branches the given policy applies to. Cannot be used with the `branches` field. Default branches must also be `protected`. |
| `branch_exceptions` | `array` of `string` | false | Names of branches | Branches to exclude from this rule. |
| `scanners` | `array` of `string` | true | `sast`, `secret_detection`, `dependency_scanning`, `container_scanning`, `dast`, `coverage_fuzzing`, `api_fuzzing` | The security scanners for this rule to consider. `sast` includes results from both SAST and SAST IaC scanners. |
| `vulnerabilities_allowed` | `integer` | true | Greater than or equal to zero | Number of vulnerabilities allowed before this rule is considered. |
@@ -137,7 +137,7 @@ This rule enforces the defined actions based on license findings.
|------------|------|----------|-----------------|-------------|
| `type` | `string` | true | `license_finding` | The rule's type. |
| `branches` | `array` of `string` | true if `branch_type` field does not exist | `[]` or the branch's name | Applicable only to protected target branches. An empty array, `[]`, applies the rule to all protected target branches. Cannot be used with the `branch_type` field. |
-| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of branches the given policy applies to. Cannot be used with the `branches` field. |
+| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of protected branches the given policy applies to. Cannot be used with the `branches` field. Default branches must also be `protected`. |
| `branch_exceptions` | `array` of `string` | false | Names of branches | Branches to exclude from this rule. |
| `match_on_inclusion` | `boolean` | true | `true`, `false` | Whether the rule matches inclusion or exclusion of licenses listed in `license_types`. |
| `license_types` | `array` of `string` | true | license types | [SPDX license names](https://spdx.org/licenses) to match on, for example `Affero General Public License v1.0` or `MIT License`. |
@@ -158,7 +158,7 @@ This rule enforces the defined actions for any merge request based on the commit
|---------------|---------------------|--------------------------------------------|---------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `type` | `string` | true | `any_merge_request` | The rule's type. |
| `branches` | `array` of `string` | true if `branch_type` field does not exist | `[]` or the branch's name | Applicable only to protected target branches. An empty array, `[]`, applies the rule to all protected target branches. Cannot be used with the `branch_type` field. |
-| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of branches the given policy applies to. Cannot be used with the `branches` field. |
+| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of protected branches the given policy applies to. Cannot be used with the `branches` field. Default branches must also be `protected`. |
| `branch_exceptions` | `array` of `string` | false | Names of branches | Branches to exclude from this rule. |
| `commits` | `string` | true | `any`, `unsigned` | Whether the rule matches for any commits, or only if unsigned commits are detected in the merge request. |
diff --git a/doc/user/project/import/github.md b/doc/user/project/import/github.md
index 5f724c83c49..c1284c303f0 100644
--- a/doc/user/project/import/github.md
+++ b/doc/user/project/import/github.md
@@ -84,6 +84,8 @@ If you are importing from GitHub.com to a self-managed GitLab instance:
[OmniAuth configuration](../../../integration/github.md#enable-github-oauth-in-gitlab).
- Because of a [known issue](https://gitlab.com/gitlab-org/gitlab/-/issues/424400), Markdown attachments from
repositories on GitHub Enterprise Server instances aren't imported.
+- Because of a [known issue](https://gitlab.com/gitlab-org/gitlab/-/issues/418800), when importing projects that used
+ [GitHub auto-merge](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/automatically-merging-a-pull-request), the imported project in GitLab can have merge commits labeled "unverified" if the commit was signed with GitHub's internal GPG key.
## Import your GitHub repository into GitLab
diff --git a/doc/user/project/repository/code_suggestions/index.md b/doc/user/project/repository/code_suggestions/index.md
index b44e26f8daf..cf2cf510b8e 100644
--- a/doc/user/project/repository/code_suggestions/index.md
+++ b/doc/user/project/repository/code_suggestions/index.md
@@ -36,14 +36,17 @@ GitLab Duo Code Suggestions are available:
During Beta, usage of Code Suggestions is governed by the [GitLab Testing Agreement](https://about.gitlab.com/handbook/legal/testing-agreement/).
-Learn about [data usage when using Code Suggestions](#code-suggestions-data-usage). As Code Suggestions matures to General Availibility it will be governed by our [AI Functionality Terms](https://about.gitlab.com/handbook/legal/ai-functionality-terms/).
+Learn about [data usage when using Code Suggestions](#code-suggestions-data-usage). As Code Suggestions matures to General Availability it will be governed by our [AI Functionality Terms](https://about.gitlab.com/handbook/legal/ai-functionality-terms/).
## Use Code Suggestions
Prerequisites:
-- Code Suggestions must be enabled for [SaaS](saas.md#enable-code-suggestions) or for [self-managed](self_managed.md#enable-code-suggestions-on-self-managed-gitlab).
- You must have installed and configured a [supported IDE editor extension](index.md#supported-editor-extensions).
+- If you are a **SaaS** user, you must enable Code Suggestions for:
+ - [The top-level group](../../../group/manage.md#enable-code-suggestions) (you must have the Owner role for that group).
+ - [Your own account](../../../profile/preferences.md#enable-code-suggestions).
+- If you are a **self-managed** user, you must enable Code Suggestions [for your instance](self_managed.md#enable-code-suggestions-on-self-managed-gitlab). How you enable Code Suggestions differs depending on your version of GitLab.
To use Code Suggestions:
diff --git a/doc/user/usage_quotas.md b/doc/user/usage_quotas.md
index bfb5694ff53..87430eb7adc 100644
--- a/doc/user/usage_quotas.md
+++ b/doc/user/usage_quotas.md
@@ -5,7 +5,7 @@ group: Utilization
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
-# Storage **(FREE ALL)**
+# Storage **(FREE SAAS)**
All projects on GitLab SaaS have 10 GiB of free storage for their Git repository and Large File Storage (LFS).
@@ -16,13 +16,11 @@ you must [purchase more storage](../subscriptions/gitlab_com/index.md#purchase-m
GitLab plans to introduce storage limits for namespaces on GitLab SaaS. After these storage limits have been applied,
storage usage will be calculated across the entire namespace and project storage limits will no longer apply.
-The amount of storage and transfer for self-managed instances has no application limits. Administrators are responsible for the underlying infrastructure costs and can set [repository size limits](../administration/settings/account_and_limit_settings.md#repository-size-limit).
-
-## View storage
+## View storage **(FREE ALL)**
You can view the following statistics for storage usage in projects and namespaces:
-- Storage usage that exceeds the GitLab SaaS storage limit or [self-managed storage quota](../administration/settings/account_and_limit_settings.md#repository-size-limit).
+- Storage usage that exceeds the GitLab SaaS storage limit or [self-managed storage limits](../administration/settings/account_and_limit_settings.md#repository-size-limit).
- Available purchased storage for GitLab SaaS.
Prerequisites:
@@ -49,7 +47,7 @@ NOTE:
Storage usage labels are being transitioned from `KB` to `KiB`, `MB` to `MiB`, and `GB` to `GiB`. During this transition,
you might see references to `KB`, `MB`, and `GB` in the UI and documentation.
-### View project fork storage usage **(FREE SAAS)**
+## View project fork storage usage
A cost factor is applied to the storage consumed by project forks so that forks consume less namespace storage than their actual size.
@@ -63,7 +61,7 @@ The cost factor applies to the project repository, LFS objects, job artifacts, p
The cost factor does not apply to private forks in namespaces on the Free plan.
-## Excess storage usage **(FREE SAAS)**
+## Excess storage usage
Excess storage usage is the amount that exceeds the 10 GiB free storage of a project's repository and LFS. If no purchased storage is available,
the project is set to a read-only state. You cannot push changes to a read-only project.
@@ -109,7 +107,7 @@ available decreases. All projects no longer have the read-only status because 40
| Yellow | 5 GiB | 0 GiB | 10 GiB | Not read-only |
| **Totals** | **45 GiB** | **10 GiB** | - | - |
-## Namespace storage limit **(FREE SAAS)**
+## Namespace storage limit
GitLab plans to introduce the following storage limits per top-level group:
@@ -160,7 +158,7 @@ Storage types that count toward the total namespace storage are:
- Wiki
- Snippets
-### Excess storage notifications **(FREE SAAS)**
+### Excess storage notifications
Storage limits are included in GitLab subscription terms but do not apply. At least 60 days before GitLab introduces storage limits,
GitLab will notify you of namespaces that exceed, or are close to exceeding, the storage limit.
diff --git a/lib/api/entities/ml/mlflow/list_registered_models.rb b/lib/api/entities/ml/mlflow/list_registered_models.rb
new file mode 100644
index 00000000000..05ef98c4ac6
--- /dev/null
+++ b/lib/api/entities/ml/mlflow/list_registered_models.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Ml
+ module Mlflow
+ class ListRegisteredModels < Grape::Entity
+ expose :registered_models, with: RegisteredModel, as: :registered_models
+ expose :next_page_token
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/ml/mlflow/registered_model.rb b/lib/api/entities/ml/mlflow/registered_model.rb
index f0f9cda86e0..bb547f9c46c 100644
--- a/lib/api/entities/ml/mlflow/registered_model.rb
+++ b/lib/api/entities/ml/mlflow/registered_model.rb
@@ -21,6 +21,10 @@ module API
def last_updated_timestamp
object.updated_at.to_i
end
+
+ def description
+ object.description.to_s
+ end
end
end
end
diff --git a/lib/api/ml/mlflow/api_helpers.rb b/lib/api/ml/mlflow/api_helpers.rb
index ddafdb3bad0..fb830dfe9e3 100644
--- a/lib/api/ml/mlflow/api_helpers.rb
+++ b/lib/api/ml/mlflow/api_helpers.rb
@@ -4,6 +4,8 @@ module API
module Ml
module Mlflow
module ApiHelpers
+ OUTER_QUOTES_REGEXP = /^("|')|("|')?$/
+
def check_api_read!
not_found! unless can?(current_user, :read_model_experiments, user_project)
end
@@ -83,6 +85,34 @@ module API
}
end
+ def model_order_params(params)
+ if params[:order_by].blank?
+ order_by = 'name'
+ sort = 'asc'
+ else
+ order_by, sort = params[:order_by].downcase.split(' ')
+ order_by = 'updated_at' if order_by == 'last_updated_timestamp'
+ sort ||= 'asc'
+ end
+
+ {
+ order_by: order_by,
+ sort: sort
+ }
+ end
+
+ def model_filter_params(params)
+ return {} if params[:filter].blank?
+
+ param, filter = params[:filter].split('=')
+
+ return {} unless param == 'name'
+
+ filter.gsub!(OUTER_QUOTES_REGEXP, '') unless filter.blank?
+
+ { name: filter }
+ end
+
def find_experiment!(iid, name)
experiment_repository.by_iid_or_name(iid: iid, name: name) || resource_not_found!
end
diff --git a/lib/api/ml/mlflow/registered_models.rb b/lib/api/ml/mlflow/registered_models.rb
index 5358453ec7e..a68a2767a74 100644
--- a/lib/api/ml/mlflow/registered_models.rb
+++ b/lib/api/ml/mlflow/registered_models.rb
@@ -120,6 +120,49 @@ module API
render_api_error!('Model could not be deleted', 400)
end
end
+
+ desc 'Search Registered Models within a project' do
+ success Entities::Ml::Mlflow::RegisteredModel
+ detail 'https://mlflow.org/docs/2.6.0/rest-api.html#search-registeredmodels'
+ end
+ params do
+ optional :filter,
+ type: String,
+ desc: "Filter to search models. must be in the format `name='value'`. Only filtering by name is supported"
+ optional :max_results,
+ type: Integer,
+ desc: 'Maximum number of models desired. Default is 200. Max threshold is 1000.',
+ default: 200
+ optional :order_by,
+ type: String,
+ desc: 'Order criteria. Can be by name or last_updated_timestamp, with optional DESC or ASC (default)' \
+ 'Valid examples: `name`, `name DESC`, `last_updated_timestamp DESC`' \
+ 'Sorting by model metadata is not supported.',
+ default: 'name ASC'
+ optional :page_token,
+ type: String,
+ desc: 'Token for pagination'
+ end
+ get 'search', urgency: :low do
+ max_results = [params[:max_results], 1000].min
+
+ finder_params = model_order_params(params)
+ filter_params = model_filter_params(params)
+
+ if !params[:filter].nil? && !filter_params.key?(:name)
+ invalid_parameter!("Invalid attribute key specified. Valid keys are '{'name'}'")
+ end
+
+ finder = ::Projects::Ml::ModelFinder.new(user_project, finder_params.merge(filter_params))
+ paginator = finder.execute.keyset_paginate(cursor: params[:page_token], per_page: max_results)
+
+ result = {
+ registered_models: paginator.records,
+ next_page_token: paginator.cursor_for_next_page
+ }
+
+ present result, with: Entities::Ml::Mlflow::ListRegisteredModels
+ end
end
end
end
diff --git a/lib/bulk_imports/projects/pipelines/legacy_references_pipeline.rb b/lib/bulk_imports/projects/pipelines/legacy_references_pipeline.rb
deleted file mode 100644
index 11bbd4770a4..00000000000
--- a/lib/bulk_imports/projects/pipelines/legacy_references_pipeline.rb
+++ /dev/null
@@ -1,131 +0,0 @@
-# frozen_string_literal: true
-
-module BulkImports
- module Projects
- module Pipelines
- class LegacyReferencesPipeline
- include Pipeline
-
- BATCH_SIZE = 100
-
- def extract(_context)
- data = Enumerator.new do |enum|
- add_matching_objects(portable.issues, enum)
- add_matching_objects(portable.merge_requests, enum)
- add_notes(portable.issues, enum)
- add_notes(portable.merge_requests, enum)
- end
-
- BulkImports::Pipeline::ExtractedData.new(data: data)
- end
-
- def transform(_context, object)
- body = object_body(object).dup
-
- body.gsub!(username_regex(mapped_usernames), mapped_usernames)
-
- matching_urls(object).each do |old_url, new_url|
- body.gsub!(old_url, new_url) if body.include?(old_url)
- end
-
- object.assign_attributes(body_field(object) => body)
-
- object
- end
-
- def load(_context, object)
- object.save! if object_body_changed?(object)
- end
-
- private
-
- def mapped_usernames
- @mapped_usernames ||= ::BulkImports::UsersMapper.new(context: context)
- .map_usernames.transform_keys { |key| "@#{key}" }
- .transform_values { |value| "@#{value}" }
- end
-
- def username_regex(mapped_usernames)
- @username_regex ||= Regexp.new(mapped_usernames.keys.sort_by(&:length)
- .reverse.map { |x| Regexp.escape(x) }.join('|'))
- end
-
- def add_matching_objects(collection, enum)
- collection.each_batch(of: BATCH_SIZE, column: :iid) do |batch|
- batch.each do |object|
- enum << object if object_has_reference?(object) || object_has_username?(object)
- end
- end
- end
-
- def add_notes(collection, enum)
- collection.each_batch(of: BATCH_SIZE, column: :iid) do |batch|
- batch.each do |object|
- object.notes.each_batch(of: BATCH_SIZE) do |notes_batch|
- notes_batch.each do |note|
- note.refresh_markdown_cache!
- enum << note if object_has_reference?(note) || object_has_username?(note)
- end
- end
- end
- end
- end
-
- def object_has_reference?(object)
- object_body(object)&.include?(source_full_path)
- end
-
- def object_has_username?(object)
- return false unless object_body(object)
-
- mapped_usernames.keys.any? { |old_username| object_body(object).include?(old_username) }
- end
-
- def object_body(object)
- call_object_method(object)
- end
-
- def object_body_changed?(object)
- call_object_method(object, suffix: '_changed?')
- end
-
- def call_object_method(object, suffix: nil)
- method = body_field(object)
- method = "#{method}#{suffix}" if suffix.present?
-
- object.public_send(method) # rubocop:disable GitlabSecurity/PublicSend -- the method being called is dependent on several factors
- end
-
- def body_field(object)
- object.is_a?(Note) ? 'note' : 'description'
- end
-
- def matching_urls(object)
- URI.extract(object_body(object), %w[http https]).each_with_object([]) do |url, array|
- parsed_url = URI.parse(url)
-
- next unless source_host == parsed_url.host
- next unless parsed_url.path&.start_with?("/#{source_full_path}")
-
- array << [url, new_url(parsed_url)]
- end
- end
-
- def new_url(parsed_old_url)
- parsed_old_url.host = ::Gitlab.config.gitlab.host
- parsed_old_url.port = ::Gitlab.config.gitlab.port
- parsed_old_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
- parsed_old_url.to_s.gsub!(source_full_path, portable.full_path)
- end
-
- def source_host
- @source_host ||= URI.parse(context.configuration.url).host
- end
-
- def source_full_path
- context.entity.source_full_path
- end
- end
- end
- end
-end
diff --git a/lib/bulk_imports/projects/stage.rb b/lib/bulk_imports/projects/stage.rb
index f9840320d88..eecd567f54f 100644
--- a/lib/bulk_imports/projects/stage.rb
+++ b/lib/bulk_imports/projects/stage.rb
@@ -135,7 +135,7 @@ module BulkImports
stage: 5
},
references: {
- pipeline: references_pipeline,
+ pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline,
stage: 5
},
finisher: {
@@ -144,14 +144,6 @@ module BulkImports
}
}
end
-
- def references_pipeline
- if Feature.enabled?(:bulk_import_async_references_pipeline)
- BulkImports::Projects::Pipelines::ReferencesPipeline
- else
- BulkImports::Projects::Pipelines::LegacyReferencesPipeline
- end
- end
end
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index dfe81524531..1f28887ff8c 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -6818,12 +6818,6 @@ msgstr ""
msgid "At least one of group_id or project_id must be specified"
msgstr ""
-msgid "At least one of your Personal Access Tokens is expired. %{generate_new}"
-msgstr ""
-
-msgid "At least one of your Personal Access Tokens will expire soon. %{generate_new}"
-msgstr ""
-
msgid "At risk"
msgstr ""
@@ -39843,6 +39837,9 @@ msgstr ""
msgid "Release|Learn more about releases"
msgstr ""
+msgid "Release|Leave blank to use the tag name as the release title."
+msgstr ""
+
msgid "Release|More information"
msgstr ""
@@ -48664,6 +48661,9 @@ msgstr ""
msgid "The name of the Jenkins project. Copy the name from the end of the URL to the project."
msgstr ""
+msgid "The namespace storage size (%{current_size}) exceeds the limit of %{size_limit} by %{exceeded_size}. You won't be able to push new code to this project. Please contact your GitLab administrator for more information."
+msgstr ""
+
msgid "The number of changes to fetch from GitLab when cloning a repository. Lower values can speed up pipeline execution. Set to %{code_open}0%{code_close} or blank to fetch all branches and tags for each job"
msgstr ""
@@ -56341,6 +56341,9 @@ msgstr ""
msgid "Your name"
msgstr ""
+msgid "Your namespace storage is full. This merge request cannot be merged. To continue, %{link_start}manage your storage usage%{link_end}."
+msgstr ""
+
msgid "Your new %{accessTokenType}"
msgstr ""
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 9a1c349c9bc..09f39506448 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -14,7 +14,8 @@ RSpec.describe 'Database schema', feature_category: :database do
# but in Search::NamespaceIndexAssignment model, only `search_index_id` is used as foreign key and indexed
search_namespace_index_assignments: [%w[search_index_id index_type]],
slack_integrations_scopes: [%w[slack_api_scope_id]],
- notes: %w[namespace_id] # this index is added in an async manner, hence it needs to be ignored in the first phase.
+ notes: %w[namespace_id], # this index is added in an async manner, hence it needs to be ignored in the first phase.
+ users: [%w[accepted_term_id]]
}.with_indifferent_access.freeze
TABLE_PARTITIONS = %w[ci_builds_metadata].freeze
diff --git a/spec/features/environments/environments_folder_spec.rb b/spec/features/environments/environments_folder_spec.rb
index ed5e3e98338..6b0306a70a8 100644
--- a/spec/features/environments/environments_folder_spec.rb
+++ b/spec/features/environments/environments_folder_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe 'Environments Folder page', :js, feature_category: :environment_m
let_it_be(:user) { create(:user) }
let!(:envs) { create_list(:environment, 4, :with_folders, project: project, folder: folder_name) }
+ def get_env_name(environment)
+ environment.name.split('/').last
+ end
+
before_all do
project.add_role(user, :developer)
end
@@ -27,6 +31,11 @@ RSpec.describe 'Environments Folder page', :js, feature_category: :environment_m
it 'renders the header with a folder name' do
expect(page).to have_content("Environments / #{folder_name}")
end
+
+ it 'renders the environments' do
+ expect(page).not_to have_content('production')
+ envs.each { |env| expect(page).to have_content(get_env_name(env)) }
+ end
end
describe 'legacy folders page' do
@@ -40,7 +49,7 @@ RSpec.describe 'Environments Folder page', :js, feature_category: :environment_m
it 'user opens folder view' do
expect(page).to have_content("Environments / #{folder_name}")
expect(page).not_to have_content('production')
- envs.each { |env| expect(page).to have_content(env.name.split('/').last) }
+ envs.each { |env| expect(page).to have_content(get_env_name(env)) }
end
end
end
diff --git a/spec/finders/projects/ml/model_finder_spec.rb b/spec/finders/projects/ml/model_finder_spec.rb
index a2c2836a63d..0395e387c8f 100644
--- a/spec/finders/projects/ml/model_finder_spec.rb
+++ b/spec/finders/projects/ml/model_finder_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::Ml::ModelFinder, feature_category: :mlops do
let_it_be(:project) { create(:project) }
let_it_be(:model1) { create(:ml_models, :with_versions, project: project) }
let_it_be(:model2) { create(:ml_models, :with_versions, project: project) }
- let_it_be(:model3) { create(:ml_models, name: "#{model1.name}_1", project: project) }
+ let_it_be(:model3) { create(:ml_models, name: "#{model1.name}_1", project: project, updated_at: 1.week.ago) }
let_it_be(:other_model) { create(:ml_models) }
let_it_be(:project_models) { [model1, model2, model3] }
@@ -52,6 +52,7 @@ RSpec.describe Projects::Ml::ModelFinder, feature_category: :mlops do
'by column' | 'name' | 'ASC' | [0, 2, 1]
'invalid sort' | nil | 'UP' | [2, 1, 0]
'invalid order by' | 'INVALID' | nil | [2, 1, 0]
+ 'order by updated_at' | 'updated_at' | nil | [1, 0, 2]
end
with_them do
let(:params) { { order_by: order_by, sort: direction } }
diff --git a/spec/fixtures/api/schemas/ml/get_model.json b/spec/fixtures/api/schemas/ml/get_model.json
index de4d8e2eb07..2e97fab5134 100644
--- a/spec/fixtures/api/schemas/ml/get_model.json
+++ b/spec/fixtures/api/schemas/ml/get_model.json
@@ -19,7 +19,7 @@
"type": "string"
},
"user_id": {
- "type": "integer"
+ "type": "string"
},
"creation_timestamp": {
"type": "integer"
diff --git a/spec/fixtures/api/schemas/ml/list_models.json b/spec/fixtures/api/schemas/ml/list_models.json
new file mode 100644
index 00000000000..eaeaa268dde
--- /dev/null
+++ b/spec/fixtures/api/schemas/ml/list_models.json
@@ -0,0 +1,53 @@
+{
+ "type": "object",
+ "required": [
+ "registered_models",
+ "next_page_token"
+ ],
+ "properties": {
+ "registered_models": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "user_id": {
+ "type": "string"
+ },
+ "creation_timestamp": {
+ "type": "integer"
+ },
+ "last_updated_timestamp": {
+ "type": "integer"
+ },
+ "tags": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+ "key",
+ "value"
+ ],
+ "properties": {
+ "key": {
+ "type": "string"
+ },
+ "value": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/spec/frontend/environments/folder/environments_folder_app_spec.js b/spec/frontend/environments/folder/environments_folder_app_spec.js
index 6ed279517bd..262e742ba5c 100644
--- a/spec/frontend/environments/folder/environments_folder_app_spec.js
+++ b/spec/frontend/environments/folder/environments_folder_app_spec.js
@@ -1,23 +1,80 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlSkeletonLoader } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import EnvironmentsFolderAppComponent from '~/environments/folder/environments_folder_app.vue';
+import EnvironmentItem from '~/environments/components/new_environment_item.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { resolvedFolder } from '../graphql/mock_data';
+
+Vue.use(VueApollo);
describe('EnvironmentsFolderAppComponent', () => {
let wrapper;
const mockFolderName = 'folders';
- const createWrapper = () => {
+ let environmentFolderMock;
+
+ const createApolloProvider = () => {
+ const mockResolvers = {
+ Query: {
+ folder: environmentFolderMock,
+ },
+ };
+
+ return createMockApollo([], mockResolvers);
+ };
+
+ beforeEach(() => {
+ environmentFolderMock = jest.fn();
+ });
+
+ const emptyFolderData = {
+ environments: [],
+ activeCount: 0,
+ stoppedCount: 0,
+ __typename: 'LocalEnvironmentFolder',
+ };
+
+ const createWrapper = ({ folderData } = {}) => {
+ environmentFolderMock.mockReturnValue(folderData || emptyFolderData);
+
+ const apolloProvider = createApolloProvider();
+
wrapper = shallowMountExtended(EnvironmentsFolderAppComponent, {
+ apolloProvider,
propsData: {
folderName: mockFolderName,
+ folderPath: '/gitlab-org/test-project/-/environments/folder/dev',
},
});
};
const findHeader = () => wrapper.findByTestId('folder-name');
+ const findEnvironmentItems = () => wrapper.findAllComponents(EnvironmentItem);
+ const findSkeletonLoaders = () => wrapper.findAllComponents(GlSkeletonLoader);
it('should render a header with the folder name', () => {
createWrapper();
expect(findHeader().text()).toMatchInterpolatedText(`Environments / ${mockFolderName}`);
});
+
+ it('should show skeletons while loading', () => {
+ createWrapper();
+ expect(findSkeletonLoaders().length).toBe(3);
+ });
+
+ describe('when environments are loaded', () => {
+ beforeEach(async () => {
+ createWrapper({ folderData: resolvedFolder });
+ await waitForPromises();
+ });
+
+ it('should list environmnets in folder', () => {
+ const items = findEnvironmentItems();
+ expect(items.length).toBe(resolvedFolder.environments.length);
+ });
+ });
});
diff --git a/spec/lib/api/ml/mlflow/api_helpers_spec.rb b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
index 757a73ed612..1f2490fe5eb 100644
--- a/spec/lib/api/ml/mlflow/api_helpers_spec.rb
+++ b/spec/lib/api/ml/mlflow/api_helpers_spec.rb
@@ -61,4 +61,47 @@ RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
end
end
end
+
+ describe '#model_order_params' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { model_order_params(params) }
+
+ where(:input, :order_by, :sort) do
+ '' | 'name' | 'asc'
+ 'name' | 'name' | 'asc'
+ 'name DESC' | 'name' | 'desc'
+ 'last_updated_timestamp' | 'updated_at' | 'asc'
+ 'last_updated_timestamp asc' | 'updated_at' | 'asc'
+ 'last_updated_timestamp DESC' | 'updated_at' | 'desc'
+ end
+ with_them do
+ let(:params) { { order_by: input } }
+
+ it 'is correct' do
+ is_expected.to include({ order_by: order_by, sort: sort })
+ end
+ end
+ end
+
+ describe '#model_filter_params' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { model_filter_params(params) }
+
+ where(:input, :output) do
+ '' | {}
+ 'name=""' | { name: '' }
+ 'name=foo' | { name: 'foo' }
+ 'name="foo"' | { name: 'foo' }
+ 'invalid="foo"' | {}
+ end
+ with_them do
+ let(:params) { { filter: input } }
+
+ it 'is correct' do
+ is_expected.to eq(output)
+ end
+ end
+ end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/legacy_references_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/legacy_references_pipeline_spec.rb
deleted file mode 100644
index 163669d4f6e..00000000000
--- a/spec/lib/bulk_imports/projects/pipelines/legacy_references_pipeline_spec.rb
+++ /dev/null
@@ -1,268 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Projects::Pipelines::LegacyReferencesPipeline, feature_category: :importers do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
- let_it_be(:entity) do
- create(
- :bulk_import_entity,
- :project_entity,
- project: project,
- bulk_import: bulk_import,
- source_full_path: 'source/full/path'
- )
- end
-
- let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
- let(:issue) { create(:issue, project: project, description: 'https://my.gitlab.com/source/full/path/-/issues/1') }
- let(:mr) do
- create(
- :merge_request,
- source_project: project,
- description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!'
- )
- end
-
- let(:issue_note) do
- create(
- :note,
- project: project,
- noteable: issue,
- note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.'
- )
- end
-
- let(:mr_note) do
- create(
- :note,
- project: project,
- noteable: mr,
- note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username'
- )
- end
-
- let(:interchanged_usernames) do
- create(
- :note,
- project: project,
- noteable: mr,
- note: '@manuelgrabowski-admin, @boaty-mc-boatface'
- )
- end
-
- let(:old_note_html) { 'old note_html' }
- let(:system_note) do
- create(
- :note,
- project: project,
- system: true,
- noteable: issue,
- note: "mentioned in merge request !#{mr.iid} created by @old_username",
- note_html: old_note_html
- )
- end
-
- let(:username_system_note) do
- create(
- :note,
- project: project,
- system: true,
- noteable: issue,
- note: "mentioned in merge request created by @source_username.",
- note_html: 'empty'
- )
- end
-
- subject(:pipeline) { described_class.new(context) }
-
- before do
- project.add_owner(user)
-
- allow(Gitlab::Cache::Import::Caching)
- .to receive(:values_from_hash)
- .and_return({
- 'old_username' => 'new_username',
- 'older_username' => 'newer_username',
- 'source_username' => 'destination_username',
- 'bob' => 'alice-gdk',
- 'alice' => 'bob-gdk',
- 'manuelgrabowski' => 'manuelgrabowski-admin',
- 'manuelgrabowski-admin' => 'manuelgrabowski',
- 'boaty-mc-boatface' => 'boatymcboatface',
- 'boatymcboatface' => 'boaty-mc-boatface'
- })
- end
-
- def create_project_data
- [issue, mr, issue_note, mr_note, system_note, username_system_note]
- end
-
- def create_username_project_data
- [username_system_note]
- end
-
- describe '#extract' do
- it 'returns ExtractedData containing issues, mrs & their notes' do
- create_project_data
-
- extracted_data = subject.extract(context)
-
- expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
- expect(extracted_data.data).to contain_exactly(issue, mr, issue_note, system_note, username_system_note, mr_note)
- expect(system_note.note_html).not_to eq(old_note_html)
- expect(system_note.note_html)
- .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}")
- .and include(project.full_path.to_s)
- .and include("@old_username")
- expect(username_system_note.note_html)
- .to include("@source_username")
- end
-
- context 'when object body is nil' do
- let(:issue) { create(:issue, project: project, description: nil) }
-
- it 'returns ExtractedData not containing the object' do
- extracted_data = subject.extract(context)
-
- expect(extracted_data.data).to contain_exactly(issue_note, mr, mr_note)
- end
- end
- end
-
- describe '#transform', :clean_gitlab_redis_cache do
- it 'updates matching urls and usernames with new ones' do
- transformed_mr = subject.transform(context, mr)
- transformed_note = subject.transform(context, mr_note)
- transformed_issue = subject.transform(context, issue)
- transformed_issue_note = subject.transform(context, issue_note)
- transformed_system_note = subject.transform(context, system_note)
- transformed_username_system_note = subject.transform(context, username_system_note)
-
- expected_url = URI('')
- expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
- expected_url.host = ::Gitlab.config.gitlab.host
- expected_url.port = ::Gitlab.config.gitlab.port
- expected_url.path = "/#{project.full_path}/-/merge_requests/#{mr.iid}"
-
- expect(transformed_issue_note.note).not_to include("@older_username")
- expect(transformed_mr.description).not_to include("@source_username")
- expect(transformed_system_note.note).not_to include("@old_username")
- expect(transformed_username_system_note.note).not_to include("@source_username")
-
- expect(transformed_issue.description)
- .to eq("http://localhost:80/#{transformed_issue.namespace.full_path}/-/issues/1")
- expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
- expect(transformed_note.note).to eq("#{expected_url} @same_username")
- expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")
- expect(transformed_system_note.note).to eq("mentioned in merge request !#{mr.iid} created by @new_username")
- expect(transformed_username_system_note.note).to include("@destination_username.")
- end
-
- it 'handles situations where old usernames are substrings of new usernames' do
- transformed_mr = subject.transform(context, mr)
-
- expect(transformed_mr.description).to include("@alice-gdk")
- expect(transformed_mr.description).not_to include("@bob-gdk-gdk")
- end
-
- it 'handles situations where old and new usernames are interchanged' do
- # e.g
- # |------------------------|-------------------------|
- # | old_username | new_username |
- # |------------------------|-------------------------|
- # | @manuelgrabowski-admin | @manuelgrabowski |
- # | @manuelgrabowski | @manuelgrabowski-admin |
- # |------------------------|-------------------------|
-
- transformed_interchanged_usernames = subject.transform(context, interchanged_usernames)
-
- expect(transformed_interchanged_usernames.note).to include("@manuelgrabowski")
- expect(transformed_interchanged_usernames.note).to include("@boatymcboatface")
- expect(transformed_interchanged_usernames.note).not_to include("@manuelgrabowski-admin")
- expect(transformed_interchanged_usernames.note).not_to include("@boaty-mc-boatface")
- end
-
- context 'when object does not have reference or username' do
- it 'returns object unchanged' do
- issue.update!(description: 'foo')
-
- transformed_issue = subject.transform(context, issue)
-
- expect(transformed_issue.description).to eq('foo')
- end
- end
-
- context 'when there are not matched urls or usernames' do
- let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }
-
- shared_examples 'returns object unchanged' do
- it 'returns object unchanged' do
- issue.update!(description: description)
-
- transformed_issue = subject.transform(context, issue)
-
- expect(transformed_issue.description).to eq(description)
- end
- end
-
- include_examples 'returns object unchanged'
-
- context 'when url path does not start with source full path' do
- let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }
-
- include_examples 'returns object unchanged'
- end
-
- context 'when host does not match and url path starts with source full path' do
- let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }
-
- include_examples 'returns object unchanged'
- end
-
- context 'when url does not match at all' do
- let(:description) { 'https://website.example/foo/bar' }
-
- include_examples 'returns object unchanged'
- end
- end
- end
-
- describe '#load' do
- it 'saves the object when object body changed' do
- transformed_issue = subject.transform(context, issue)
- transformed_note = subject.transform(context, mr_note)
- transformed_mr = subject.transform(context, mr)
- transformed_issue_note = subject.transform(context, issue_note)
- transformed_system_note = subject.transform(context, system_note)
-
- expect(transformed_issue).to receive(:save!)
- expect(transformed_note).to receive(:save!)
- expect(transformed_mr).to receive(:save!)
- expect(transformed_issue_note).to receive(:save!)
- expect(transformed_system_note).to receive(:save!)
-
- subject.load(context, transformed_issue)
- subject.load(context, transformed_note)
- subject.load(context, transformed_mr)
- subject.load(context, transformed_issue_note)
- subject.load(context, transformed_system_note)
- end
-
- context 'when object body is not changed' do
- it 'does not save the object' do
- expect(mr).not_to receive(:save!)
- expect(mr_note).not_to receive(:save!)
- expect(system_note).not_to receive(:save!)
-
- subject.load(context, mr)
- subject.load(context, mr_note)
- subject.load(context, system_note)
- end
- end
- end
-end
diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb
index 8965ee6547c..8b06e0bb2ee 100644
--- a/spec/lib/bulk_imports/projects/stage_spec.rb
+++ b/spec/lib/bulk_imports/projects/stage_spec.rb
@@ -21,24 +21,6 @@ RSpec.describe BulkImports::Projects::Stage, feature_category: :importers do
expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
end
- context 'when bulk_import_async_references_pipeline feature flag is disabled' do
- before do
- stub_feature_flags(bulk_import_async_references_pipeline: false)
- end
-
- it 'uses the legacy references pipeline' do
- pipelines = subject.pipelines
-
- expect(pipelines).to include(
- hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::LegacyReferencesPipeline })
- )
-
- expect(pipelines).not_to include(
- hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline })
- )
- end
- end
-
it 'only have pipelines with valid keys' do
pipeline_keys = subject.pipelines.collect(&:keys).flatten.uniq
allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index c9b2e21d934..b349c74a498 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
end
after do
- Sentry.get_current_scope.clear
+ clear_sentry_settings
end
describe '.track_and_raise_for_dev_exception' do
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 3fc486a8984..8e9529da1b4 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -55,8 +55,7 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
end
context 'when there is a DB call in the concurrent thread' do
- it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error',
- quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/432145' do
+ it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error' do
stub_request(:get, 'http://example.org').to_return(status: 200, body: 'hello world')
result = described_class.get('http://example.org', async: true) do |_fragment|
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 7304437bc42..bc9e1ad15f5 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -320,6 +320,10 @@ RSpec.describe API::Helpers, :enable_admin_mode, feature_category: :system_acces
Gitlab::ErrorTracking.configure
end
+ after do
+ clear_sentry_settings
+ end
+
it 'does not report a MethodNotAllowed exception to Sentry' do
exception = Grape::Exceptions::MethodNotAllowed.new({ 'X-GitLab-Test' => '1' })
allow(exception).to receive(:backtrace).and_return(caller)
diff --git a/spec/requests/api/ml/mlflow/registered_models_spec.rb b/spec/requests/api/ml/mlflow/registered_models_spec.rb
index 41a0c1d47ef..4dd6fe31258 100644
--- a/spec/requests/api/ml/mlflow/registered_models_spec.rb
+++ b/spec/requests/api/ml/mlflow/registered_models_spec.rb
@@ -240,4 +240,44 @@ RSpec.describe API::Ml::Mlflow::RegisteredModels, feature_category: :mlops do
it_behaves_like 'MLflow|a read/write model registry resource'
end
end
+
+ describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/registered-models/search' do
+ let_it_be(:model2) do
+ create(:ml_models, :with_metadata, project: project)
+ end
+
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search" }
+
+ it 'returns all the models', :aggregate_failures do
+ is_expected.to have_gitlab_http_status(:ok)
+ is_expected.to match_response_schema('ml/list_models')
+ expect(json_response["registered_models"].count).to be(2)
+ end
+
+ context "with a valid filter supplied" do
+ let(:filter) { "name='#{model2.name}'" }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search?filter=#{filter}" }
+
+ it 'returns only the models for the given filter' do
+ is_expected.to have_gitlab_http_status(:ok)
+ expect(json_response["registered_models"].count).to be(1)
+ end
+ end
+
+ context "with an invalid filter supplied" do
+ let(:filter) { "description='foo'" }
+ let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search?filter=#{filter}" }
+
+ it 'returns an error' do
+ is_expected.to have_gitlab_http_status(:bad_request)
+
+ expect(json_response).to include({ 'error_code' => 'INVALID_PARAMETER_VALUE' })
+ end
+ end
+
+ describe 'Error States' do
+ it_behaves_like 'MLflow|an authenticated resource'
+ it_behaves_like 'MLflow|a read-only model registry resource'
+ end
+ end
end
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index e043d1249b9..9774d52228d 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -120,6 +120,12 @@ module StubConfiguration
.to receive(:sentry_clientside_dsn) { clientside_dsn }
end
+ def clear_sentry_settings
+ ::Sentry.get_current_scope.clear
+ ::Sentry.close
+ ::Raven.configuration = ::Raven::Configuration.new
+ end
+
def stub_microsoft_graph_mailer_setting(messages)
allow(Gitlab.config.microsoft_graph_mailer).to receive_messages(to_settings(messages))
end
diff --git a/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
index 4eae8632467..45248f57683 100644
--- a/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/bitbucket_server_import/object_import_shared_examples.rb
@@ -65,5 +65,17 @@ RSpec.shared_examples Gitlab::BitbucketServerImport::ObjectImporter do
it_behaves_like 'notifies the waiter'
end
+
+ context 'when project import has failed' do
+ let_it_be(:project_id) { create(:project, :import_failed).id }
+
+ it 'does not call the importer' do
+ expect_next(worker.importer_class).not_to receive(:execute)
+
+ worker.perform(project_id, {}, waiter_key)
+ end
+
+ it_behaves_like 'notifies the waiter'
+ end
end
end
diff --git a/workhorse/internal/redis/keywatcher.go b/workhorse/internal/redis/keywatcher.go
index ddb838121b7..efc14f1cb5e 100644
--- a/workhorse/internal/redis/keywatcher.go
+++ b/workhorse/internal/redis/keywatcher.go
@@ -21,11 +21,11 @@ type KeyWatcher struct {
subscribers map[string][]chan string
shutdown chan struct{}
reconnectBackoff backoff.Backoff
- redisConn *redis.Client
+ redisConn *redis.Client // can be nil
conn *redis.PubSub
}
-func NewKeyWatcher() *KeyWatcher {
+func NewKeyWatcher(redisConn *redis.Client) *KeyWatcher {
return &KeyWatcher{
shutdown: make(chan struct{}),
reconnectBackoff: backoff.Backoff{
@@ -34,6 +34,7 @@ func NewKeyWatcher() *KeyWatcher {
Factor: 2,
Jitter: true,
},
+ redisConn: redisConn,
}
}
@@ -125,16 +126,13 @@ func (kw *KeyWatcher) receivePubSubStream(ctx context.Context, pubsub *redis.Pub
}
}
-func (kw *KeyWatcher) Process(client *redis.Client) {
+func (kw *KeyWatcher) Process() {
log.Info("keywatcher: starting process loop")
ctx := context.Background() // lint:allow context.Background
- kw.mu.Lock()
- kw.redisConn = client
- kw.mu.Unlock()
for {
- pubsub := client.Subscribe(ctx, []string{}...)
+ pubsub := kw.redisConn.Subscribe(ctx, []string{}...)
if err := pubsub.Ping(ctx); err != nil {
log.WithError(fmt.Errorf("keywatcher: %v", err)).Error()
time.Sleep(kw.reconnectBackoff.Duration())
diff --git a/workhorse/internal/redis/keywatcher_test.go b/workhorse/internal/redis/keywatcher_test.go
index bca4ca43a64..0e3278f2d26 100644
--- a/workhorse/internal/redis/keywatcher_test.go
+++ b/workhorse/internal/redis/keywatcher_test.go
@@ -7,6 +7,8 @@ import (
"testing"
"time"
+ "github.com/redis/go-redis/v9"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
@@ -18,24 +20,28 @@ const (
runnerKey = "runner:build_queue:10"
)
-func initRdb() {
- buf, _ := os.ReadFile("../../config.toml")
- cfg, _ := config.LoadConfig(string(buf))
- Configure(cfg.Redis)
+func initRdb(t *testing.T) *redis.Client {
+ buf, err := os.ReadFile("../../config.toml")
+ require.NoError(t, err)
+ cfg, err := config.LoadConfig(string(buf))
+ require.NoError(t, err)
+ rdb, err := Configure(cfg.Redis)
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ assert.NoError(t, rdb.Close())
+ })
+ return rdb
}
-func (kw *KeyWatcher) countSubscribers(key string) int {
+func countSubscribers(kw *KeyWatcher, key string) int {
kw.mu.Lock()
defer kw.mu.Unlock()
return len(kw.subscribers[key])
}
// Forces a run of the `Process` loop against a mock PubSubConn.
-func (kw *KeyWatcher) processMessages(t *testing.T, numWatchers int, value string, ready chan<- struct{}, wg *sync.WaitGroup) {
- kw.mu.Lock()
- kw.redisConn = rdb
+func processMessages(t *testing.T, kw *KeyWatcher, numWatchers int, value string, ready chan<- struct{}, wg *sync.WaitGroup) {
psc := kw.redisConn.Subscribe(ctx, []string{}...)
- kw.mu.Unlock()
errC := make(chan error)
go func() { errC <- kw.receivePubSubStream(ctx, psc) }()
@@ -48,7 +54,7 @@ func (kw *KeyWatcher) processMessages(t *testing.T, numWatchers int, value strin
close(ready)
require.Eventually(t, func() bool {
- return kw.countSubscribers(runnerKey) == numWatchers
+ return countSubscribers(kw, runnerKey) == numWatchers
}, time.Second, time.Millisecond)
// send message after listeners are ready
@@ -74,7 +80,7 @@ type keyChangeTestCase struct {
}
func TestKeyChangesInstantReturn(t *testing.T) {
- initRdb()
+ rdb := initRdb(t)
testCases := []keyChangeTestCase{
// WatchKeyStatusAlreadyChanged
@@ -118,13 +124,10 @@ func TestKeyChangesInstantReturn(t *testing.T) {
rdb.Set(ctx, runnerKey, tc.returnValue, 0)
}
- defer func() {
- rdb.FlushDB(ctx)
- }()
+ defer rdb.FlushDB(ctx)
- kw := NewKeyWatcher()
+ kw := NewKeyWatcher(rdb)
defer kw.Shutdown()
- kw.redisConn = rdb
kw.conn = kw.redisConn.Subscribe(ctx, []string{}...)
val, err := kw.WatchKey(ctx, runnerKey, tc.watchValue, tc.timeout)
@@ -136,7 +139,7 @@ func TestKeyChangesInstantReturn(t *testing.T) {
}
func TestKeyChangesWhenWatching(t *testing.T) {
- initRdb()
+ rdb := initRdb(t)
testCases := []keyChangeTestCase{
// WatchKeyStatusSeenChange
@@ -170,11 +173,9 @@ func TestKeyChangesWhenWatching(t *testing.T) {
rdb.Set(ctx, runnerKey, tc.returnValue, 0)
}
- kw := NewKeyWatcher()
+ kw := NewKeyWatcher(rdb)
defer kw.Shutdown()
- defer func() {
- rdb.FlushDB(ctx)
- }()
+ defer rdb.FlushDB(ctx)
wg := &sync.WaitGroup{}
wg.Add(1)
@@ -189,13 +190,13 @@ func TestKeyChangesWhenWatching(t *testing.T) {
require.Equal(t, tc.expectedStatus, val, "Expected value")
}()
- kw.processMessages(t, 1, tc.processedValue, ready, wg)
+ processMessages(t, kw, 1, tc.processedValue, ready, wg)
})
}
}
func TestKeyChangesParallel(t *testing.T) {
- initRdb()
+ rdb := initRdb(t)
testCases := []keyChangeTestCase{
{
@@ -222,15 +223,13 @@ func TestKeyChangesParallel(t *testing.T) {
rdb.Set(ctx, runnerKey, tc.returnValue, 0)
}
- defer func() {
- rdb.FlushDB(ctx)
- }()
+ defer rdb.FlushDB(ctx)
wg := &sync.WaitGroup{}
wg.Add(runTimes)
ready := make(chan struct{})
- kw := NewKeyWatcher()
+ kw := NewKeyWatcher(rdb)
defer kw.Shutdown()
for i := 0; i < runTimes; i++ {
@@ -244,16 +243,15 @@ func TestKeyChangesParallel(t *testing.T) {
}()
}
- kw.processMessages(t, runTimes, tc.processedValue, ready, wg)
+ processMessages(t, kw, runTimes, tc.processedValue, ready, wg)
})
}
}
func TestShutdown(t *testing.T) {
- initRdb()
+ rdb := initRdb(t)
- kw := NewKeyWatcher()
- kw.redisConn = rdb
+ kw := NewKeyWatcher(rdb)
kw.conn = kw.redisConn.Subscribe(ctx, []string{}...)
defer kw.Shutdown()
@@ -272,14 +270,14 @@ func TestShutdown(t *testing.T) {
go func() {
defer wg.Done()
- require.Eventually(t, func() bool { return kw.countSubscribers(runnerKey) == 1 }, 10*time.Second, time.Millisecond)
+ require.Eventually(t, func() bool { return countSubscribers(kw, runnerKey) == 1 }, 10*time.Second, time.Millisecond)
kw.Shutdown()
}()
wg.Wait()
- require.Eventually(t, func() bool { return kw.countSubscribers(runnerKey) == 0 }, 10*time.Second, time.Millisecond)
+ require.Eventually(t, func() bool { return countSubscribers(kw, runnerKey) == 0 }, 10*time.Second, time.Millisecond)
// Adding a key after the shutdown should result in an immediate response
var val WatchKeyStatus
diff --git a/workhorse/internal/redis/redis.go b/workhorse/internal/redis/redis.go
index e21dae916e4..1fd30b05de5 100644
--- a/workhorse/internal/redis/redis.go
+++ b/workhorse/internal/redis/redis.go
@@ -17,7 +17,6 @@ import (
)
var (
- rdb *redis.Client
// found in https://github.com/redis/go-redis/blob/c7399b6a17d7d3e2a57654528af91349f2468529/sentinel.go#L626
errSentinelMasterAddr error = errors.New("redis: all sentinels specified in configuration are unreachable")
@@ -129,16 +128,13 @@ func (s sentinelInstrumentationHook) ProcessPipelineHook(next redis.ProcessPipel
}
}
-func GetRedisClient() *redis.Client {
- return rdb
-}
-
// Configure redis-connection
-func Configure(cfg *config.RedisConfig) error {
+func Configure(cfg *config.RedisConfig) (*redis.Client, error) {
if cfg == nil {
- return nil
+ return nil, nil
}
+ var rdb *redis.Client
var err error
if len(cfg.Sentinel) > 0 {
@@ -147,7 +143,7 @@ func Configure(cfg *config.RedisConfig) error {
rdb, err = configureRedis(cfg)
}
- return err
+ return rdb, err
}
func configureRedis(cfg *config.RedisConfig) (*redis.Client, error) {
diff --git a/workhorse/internal/redis/redis_test.go b/workhorse/internal/redis/redis_test.go
index d16a7a02761..cbceb7e6183 100644
--- a/workhorse/internal/redis/redis_test.go
+++ b/workhorse/internal/redis/redis_test.go
@@ -29,8 +29,8 @@ func mockRedisServer(t *testing.T, connectReceived *atomic.Value) string {
}
func TestConfigureNoConfig(t *testing.T) {
- rdb = nil
- Configure(nil)
+ rdb, err := Configure(nil)
+ require.NoError(t, err)
require.Nil(t, rdb, "rdb client should be nil")
}
@@ -57,15 +57,15 @@ func TestConfigureValidConfigX(t *testing.T) {
parsedURL := helper.URLMustParse(tc.scheme + "://" + a)
cfg := &config.RedisConfig{URL: config.TomlURL{URL: *parsedURL}}
- Configure(cfg)
+ rdb, err := Configure(cfg)
+ require.NoError(t, err)
+ defer rdb.Close()
- require.NotNil(t, GetRedisClient().Conn(), "Pool should not be nil")
+ require.NotNil(t, rdb.Conn(), "Pool should not be nil")
// goredis initialise connections lazily
rdb.Ping(context.Background())
require.True(t, connectReceived.Load().(bool))
-
- rdb = nil
})
}
}
@@ -96,15 +96,15 @@ func TestConnectToSentinel(t *testing.T) {
}
cfg := &config.RedisConfig{Sentinel: sentinelUrls}
- Configure(cfg)
+ rdb, err := Configure(cfg)
+ require.NoError(t, err)
+ defer rdb.Close()
- require.NotNil(t, GetRedisClient().Conn(), "Pool should not be nil")
+ require.NotNil(t, rdb.Conn(), "Pool should not be nil")
// goredis initialise connections lazily
rdb.Ping(context.Background())
require.True(t, connectReceived.Load().(bool))
-
- rdb = nil
})
}
}
diff --git a/workhorse/main.go b/workhorse/main.go
index 3043ae50a22..5743355594f 100644
--- a/workhorse/main.go
+++ b/workhorse/main.go
@@ -225,13 +225,14 @@ func run(boot bootConfig, cfg config.Config) error {
log.Info("Using redis/go-redis")
- redisKeyWatcher := redis.NewKeyWatcher()
- if err := redis.Configure(cfg.Redis); err != nil {
+ rdb, err := redis.Configure(cfg.Redis)
+ if err != nil {
log.WithError(err).Error("unable to configure redis client")
}
+ redisKeyWatcher := redis.NewKeyWatcher(rdb)
- if rdb := redis.GetRedisClient(); rdb != nil {
- go redisKeyWatcher.Process(rdb)
+ if rdb != nil {
+ go redisKeyWatcher.Process()
}
watchKeyFn := redisKeyWatcher.WatchKey