Add latest changes from gitlab-org/gitlab@master
parent ab37c8f637
commit 08775893a8

@@ -30,6 +30,8 @@ include:
  needs:
    - review-deploy
    - download-knapsack-report
    - pipeline: $PARENT_PIPELINE_ID
      job: retrieve-tests-metadata
  variables:
    GIT_LFS_SKIP_SMUDGE: 1
    WD_INSTALL_DIR: /usr/local/bin

@@ -24,7 +24,6 @@ See [the general developer security release guidelines](https://gitlab.com/gitla
- Please see the security release [Code reviews and Approvals] documentation for details on which AppSec team member to ping for approval.
- Trigger the [`e2e:package-and-test` job]. The docker image generated will be used by the AppSec engineer to validate the security vulnerability has been remediated.
- [ ] For a backport MR targeting a versioned stable branch (`X-Y-stable-ee`).
  - [ ] Milestone is set to the version this backport applies to. A closed milestone can be assigned via [quick actions].
  - [ ] Ensure it's approved by the same maintainer that reviewed and approved the merge request targeting the default branch.
  - [ ] Ensure this merge request and the related security issue have a `~severity::x` label

@@ -32,7 +31,6 @@ See [the general developer security release guidelines](https://gitlab.com/gitla

## Maintainer checklist

- [ ] Correct milestone is applied and the title is matching across all backports.
- [ ] Assigned (_not_ as reviewer) to `@gitlab-release-tools-bot` with passing CI pipelines.
- [ ] Correct `~severity::x` label is applied to this merge request and the related security issue.

@@ -1098,7 +1098,6 @@ RSpec/BeforeAllRoleAssignment:
    - 'spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/commit_notes_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/legacy_references_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/pipeline_schedules_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/project_pipeline_spec.rb'

@@ -1772,7 +1772,6 @@ RSpec/NamedSubject:
    - 'spec/lib/bulk_imports/projects/graphql/get_project_query_spec.rb'
    - 'spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb'
    - 'spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/legacy_references_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/references_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/stage_spec.rb'
    - 'spec/lib/bulk_imports/source_url_builder_spec.rb'

@@ -1,12 +1,47 @@
<script>
import { GlSkeletonLoader } from '@gitlab/ui';
import { s__ } from '~/locale';
import folderQuery from '../graphql/queries/folder.query.graphql';
import EnvironmentItem from '../components/new_environment_item.vue';

export default {
  components: {
    GlSkeletonLoader,
    EnvironmentItem,
  },
  props: {
    folderName: {
      type: String,
      required: true,
    },
    folderPath: {
      type: String,
      required: true,
    },
  },
  apollo: {
    folder: {
      query: folderQuery,
      variables() {
        return {
          environment: this.environmentQueryData,
          scope: '',
          search: '',
          perPage: 10,
        };
      },
    },
  },
  computed: {
    environmentQueryData() {
      return { folderPath: this.folderPath };
    },
    environments() {
      return this.folder?.environments;
    },
    isLoading() {
      return this.$apollo.queries.folder.loading;
    },
  },
  i18n: {
    pageTitle: s__('Environments|Environments'),

@@ -14,8 +49,26 @@ export default {
};
</script>
<template>
  <h4 class="gl-font-weight-normal" data-testid="folder-name">
    {{ $options.i18n.pageTitle }} /
    <b>{{ folderName }}</b>
  </h4>
  <div>
    <h4 class="gl-font-weight-normal" data-testid="folder-name">
      {{ $options.i18n.pageTitle }} /
      <b>{{ folderName }}</b>
    </h4>
    <div v-if="isLoading">
      <div
        v-for="n in 3"
        :key="`skeleton-box-${n}`"
        class="gl-border-gray-100 gl-border-t-solid gl-border-1 gl-py-5 gl-md-pl-7"
      >
        <gl-skeleton-loader :lines="2" />
      </div>
    </div>
    <environment-item
      v-for="environment in environments"
      :key="environment.name"
      :environment="environment"
      class="gl-border-gray-100 gl-border-t-solid gl-border-1 gl-pt-3"
      in-folder
    />
  </div>
</template>

@@ -2,13 +2,14 @@ import Vue from 'vue';
import VueApollo from 'vue-apollo';
import createDefaultClient from '~/lib/graphql';
import Translate from '~/vue_shared/translate';
import { apolloProvider } from '../graphql/client';
import EnvironmentsFolderView from './environments_folder_view.vue';
import EnvironmentsFolderApp from './environments_folder_app.vue';

Vue.use(Translate);
Vue.use(VueApollo);

const apolloProvider = new VueApollo({
const legacyApolloProvider = new VueApollo({
  defaultClient: createDefaultClient(),
});

@@ -17,16 +18,25 @@ export default () => {
  const environmentsData = el.dataset;
  if (gon.features.environmentsFolderNewLook) {
    const folderName = environmentsData.environmentsDataFolderName;
    const folderPath = environmentsData.environmentsDataEndpoint.replace('.json', '');
    const projectPath = environmentsData.environmentsDataProjectPath;
    const helpPagePath = environmentsData.environmentsDataHelpPagePath;

    return new Vue({
      el,
      components: {
        EnvironmentsFolderApp,
      },
      provide: {
        projectPath,
        helpPagePath,
      },
      apolloProvider,
      render(createElement) {
        return createElement('environments-folder-app', {
          props: {
            folderName,
            folderPath,
          },
        });
      },

@@ -38,7 +48,7 @@ export default () => {
    components: {
      EnvironmentsFolderView,
    },
    apolloProvider,
    apolloProvider: legacyApolloProvider,
    provide: {
      projectPath: el.dataset.projectPath,
    },

@@ -1,5 +1,10 @@
query getEnvironmentFolder($environment: NestedLocalEnvironment, $scope: String, $search: String) {
  folder(environment: $environment, scope: $scope, search: $search) @client {
query getEnvironmentFolder(
  $environment: NestedLocalEnvironment
  $scope: String
  $search: String
  $perPage: Int
) {
  folder(environment: $environment, scope: $scope, search: $search, perPage: $perPage) @client {
    activeCount
    environments
    stoppedCount

@@ -59,8 +59,10 @@ export const baseQueries = (endpoint) => ({
      };
    });
  },
  folder(_, { environment: { folderPath }, scope, search }) {
    return axios.get(folderPath, { params: { scope, search, per_page: 3 } }).then((res) => ({
  folder(_, { environment: { folderPath }, scope, search, perPage }) {
    // eslint-disable-next-line camelcase
    const per_page = perPage || 3;
    return axios.get(folderPath, { params: { scope, search, per_page } }).then((res) => ({
      activeCount: res.data.active_count,
      environments: res.data.environments.map(mapEnvironment),
      stoppedCount: res.data.stopped_count,

@@ -176,7 +176,10 @@ export default {
    </p>
    <form v-if="showForm" class="js-quick-submit" @submit.prevent="submitForm">
      <tag-field />
      <gl-form-group :label="__('Release title')">
      <gl-form-group
        :label="__('Release title')"
        :description="s__('Release|Leave blank to use the tag name as the release title.')"
      >
        <gl-form-input
          id="release-title"
          ref="releaseTitleInput"

@@ -77,12 +77,12 @@ export default {
      :size="32"
    />
    <div class="commit-detail flex-list gl-display-flex gl-flex-grow-1 gl-min-w-0">
      <div class="commit-content gl-w-full gl-text-truncate" data-testid="commit-content">
      <div class="commit-content gl-w-full" data-testid="commit-content">
        <gl-link
          v-safe-html:[$options.safeHtmlConfig]="commit.titleHtml"
          :href="commit.webPath"
          :class="{ 'gl-font-style-italic': !commit.message }"
          class="commit-row-message item-title"
          class="commit-row-message item-title gl-line-clamp-1"
        />
        <gl-button
          v-if="commit.descriptionHtml"

@@ -95,7 +95,7 @@ export default {
          icon="ellipsis_h"
          @click="toggleShowDescription"
        />
        <div class="committer">
        <div class="committer gl-pb-2">
          <gl-link
            v-if="commit.author"
            :href="commit.author.webPath"

@@ -101,14 +101,9 @@ module Autocomplete
      if Feature.enabled?(:group_users_autocomplete_using_batch_reduction, current_user)
        members_relation = ::Autocomplete::GroupUsersFinder.new(group: group, members_relation: true).execute # rubocop: disable CodeReuse/Finder

        user_ids = Set.new
        members_relation.each_batch(of: BATCH_SIZE) do |relation|
          user_ids += relation.pluck_user_ids
        end

        user_relations = []
        user_ids.to_a.in_groups_of(BATCH_SIZE, false) do |batch_user_ids|
          user_relations << users_relation.id_in(batch_user_ids)
        members_relation.distinct_each_batch(column: :user_id, of: BATCH_SIZE) do |relation|
          user_relations << users_relation.id_in(relation.pluck_user_ids)
        end

        # When there is more than 1 batch, we need to apply users_relation again on

@@ -5,7 +5,7 @@ module Projects
    class ModelFinder
      include Gitlab::Utils::StrongMemoize

      VALID_ORDER_BY = %w[name created_at id].freeze
      VALID_ORDER_BY = %w[name created_at updated_at id].freeze
      VALID_SORT = %w[asc desc].freeze

      def initialize(project, params = {})
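
The only change above adds `updated_at` to `VALID_ORDER_BY`. A minimal usage sketch, assuming the finder is invoked as in the MLflow search endpoint later in this diff (the call shape follows `initialize(project, params = {})`; `project` is a placeholder):

```ruby
# Order a project's ML models by most recent update. `order_by` should be in
# VALID_ORDER_BY and `sort` in VALID_SORT; the spec further down suggests
# invalid values fall back to a default ordering.
models = Projects::Ml::ModelFinder.new(project, order_by: 'updated_at', sort: 'desc').execute
```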

@@ -13,6 +13,8 @@ module EnvironmentsHelper
    {
      "endpoint" => folder_project_environments_path(@project, @folder, format: :json),
      "folder_name" => @folder,
      "project_path" => project_path(@project),
      "help_page_path" => help_page_path("ci/environments/index"),
      "can_read_environment" => can?(current_user, :read_environment, @project).to_s
    }
  end

@@ -7,6 +7,8 @@ module Gitlab
    module ObjectImporter
      extend ActiveSupport::Concern

      FAILED_IMPORT_STATES = %w[canceled failed].freeze

      included do
        include ApplicationWorker

@@ -33,8 +35,10 @@ module Gitlab

        return unless project

        if project.import_state&.canceled?
          info(project.id, message: 'project import canceled')
        import_state = project.import_status

        if FAILED_IMPORT_STATES.include?(import_state)
          info(project.id, message: "project import #{import_state}")
          return
        end

@@ -1,8 +0,0 @@
---
name: bulk_import_async_references_pipeline
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/135806
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/430181
milestone: '16.7'
type: development
group: group::import and integrate
default_enabled: false

@@ -0,0 +1,18 @@
# frozen_string_literal: true

class DropIndexUsersOnAcceptedTermId < Gitlab::Database::Migration[2.2]
  milestone '16.7'
  disable_ddl_transaction!

  TABLE_NAME = 'users'
  INDEX_NAME = 'index_users_on_accepted_term_id'
  COLUMN = 'accepted_term_id'

  def up
    remove_concurrent_index_by_name TABLE_NAME, name: INDEX_NAME
  end

  def down
    add_concurrent_index TABLE_NAME, COLUMN, name: INDEX_NAME
  end
end

@@ -0,0 +1 @@
5d4421756749b4ef2cc74213e5e73d6ecec8f47d7db3645941f5d66d162100ac

@@ -34882,8 +34882,6 @@ CREATE INDEX index_users_for_active_billable_users ON users USING btree (id) WHE

CREATE INDEX index_users_for_auditors ON users USING btree (id) WHERE (auditor IS TRUE);

CREATE INDEX index_users_on_accepted_term_id ON users USING btree (accepted_term_id);

CREATE INDEX index_users_on_admin ON users USING btree (admin);

CREATE UNIQUE INDEX index_users_on_confirmation_token ON users USING btree (confirmation_token);

@@ -8,7 +8,7 @@ type: reference
# GitLab Admin Area **(FREE SELF)**

The Admin Area provides a web UI to manage and configure features of GitLab
self-managed instances. If you are an administrator,to access the Admin Area:
self-managed instances. If you are an administrator, to access the Admin Area:

- In GitLab 16.1 and later: on the left sidebar, select **Search or go to**, then select **Admin Area**.
- In GitLab 16.0 and earlier: on the top bar, select **Main menu > Admin**.

@@ -27,6 +27,7 @@ Maintainer role.
Prerequisites:

- When granting the **Allowed to deploy** permission to a group or subgroup, the user configuring the protected environment must be a **direct member** of the group or subgroup to be added. Otherwise, the group or subgroup does not show up in the dropdown list. For more information see [issue #345140](https://gitlab.com/gitlab-org/gitlab/-/issues/345140).
- When granting **Allowed to deploy** and **Approvers** permissions to a group or project by using the settings UI, only direct members of the group or project receive these permissions. To grant these permissions to inherited members also, [use the API](../../api/protected_environments.md#group-inheritance-types). For more information see [issue #422392](https://gitlab.com/gitlab-org/gitlab/-/issues/422392).

To protect an environment:

@@ -53,8 +53,13 @@ class Animal < ActiveRecord::Base
  def self.inheritance_column = 'species'
end

class Dog < Animal; end
class Cat < Animal; end
class Dog < Animal
  self.allow_legacy_sti_class = true
end

class Cat < Animal
  self.allow_legacy_sti_class = true
end
```

If your table already has a `*_type`, new classes for the different types can be added as needed.
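
For instance, a hedged sketch of that last point, reusing the `species` example above (the `Hamster` class is hypothetical):

```ruby
# A newly added legacy subclass opts in to STI explicitly, following the
# same pattern as Dog and Cat above.
class Hamster < Animal
  self.allow_legacy_sti_class = true
end
```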

@@ -467,6 +467,10 @@ existing feature requests in the [GitLab](https://gitlab.com/gitlab-org/gitlab/-
These issues are the best avenue for getting updates on specific product plans
and for communicating directly with the relevant GitLab team members.

## Storage

The amount of storage and transfer for self-managed instances has no application limits. Administrators are responsible for the underlying infrastructure costs and can set [repository size limits](../../administration/settings/account_and_limit_settings.md#repository-size-limit).

## Troubleshooting

### Subscription data fails to synchronize

@@ -116,7 +116,7 @@ This rule enforces the defined actions based on security scan findings.
|-------|------|----------|-----------------|-------------|
| `type` | `string` | true | `scan_finding` | The rule's type. |
| `branches` | `array` of `string` | true if `branch_type` field does not exist | `[]` or the branch's name | Applicable only to protected target branches. An empty array, `[]`, applies the rule to all protected target branches. Cannot be used with the `branch_type` field. |
| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of branches the given policy applies to. Cannot be used with the `branches` field. |
| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of protected branches the given policy applies to. Cannot be used with the `branches` field. Default branches must also be `protected`. |
| `branch_exceptions` | `array` of `string` | false | Names of branches | Branches to exclude from this rule. |
| `scanners` | `array` of `string` | true | `sast`, `secret_detection`, `dependency_scanning`, `container_scanning`, `dast`, `coverage_fuzzing`, `api_fuzzing` | The security scanners for this rule to consider. `sast` includes results from both SAST and SAST IaC scanners. |
| `vulnerabilities_allowed` | `integer` | true | Greater than or equal to zero | Number of vulnerabilities allowed before this rule is considered. |

@@ -137,7 +137,7 @@ This rule enforces the defined actions based on license findings.
|------------|------|----------|-----------------|-------------|
| `type` | `string` | true | `license_finding` | The rule's type. |
| `branches` | `array` of `string` | true if `branch_type` field does not exist | `[]` or the branch's name | Applicable only to protected target branches. An empty array, `[]`, applies the rule to all protected target branches. Cannot be used with the `branch_type` field. |
| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of branches the given policy applies to. Cannot be used with the `branches` field. |
| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of protected branches the given policy applies to. Cannot be used with the `branches` field. Default branches must also be `protected`. |
| `branch_exceptions` | `array` of `string` | false | Names of branches | Branches to exclude from this rule. |
| `match_on_inclusion` | `boolean` | true | `true`, `false` | Whether the rule matches inclusion or exclusion of licenses listed in `license_types`. |
| `license_types` | `array` of `string` | true | license types | [SPDX license names](https://spdx.org/licenses) to match on, for example `Affero General Public License v1.0` or `MIT License`. |

@@ -158,7 +158,7 @@ This rule enforces the defined actions for any merge request based on the commit
|---------------|---------------------|--------------------------------------------|---------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `type` | `string` | true | `any_merge_request` | The rule's type. |
| `branches` | `array` of `string` | true if `branch_type` field does not exist | `[]` or the branch's name | Applicable only to protected target branches. An empty array, `[]`, applies the rule to all protected target branches. Cannot be used with the `branch_type` field. |
| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of branches the given policy applies to. Cannot be used with the `branches` field. |
| `branch_type` | `string` | true if `branches` field does not exist | `default` or `protected` | The types of protected branches the given policy applies to. Cannot be used with the `branches` field. Default branches must also be `protected`. |
| `branch_exceptions` | `array` of `string` | false | Names of branches | Branches to exclude from this rule. |
| `commits` | `string` | true | `any`, `unsigned` | Whether the rule matches for any commits, or only if unsigned commits are detected in the merge request. |

@@ -84,6 +84,8 @@ If you are importing from GitHub.com to a self-managed GitLab instance:
  [OmniAuth configuration](../../../integration/github.md#enable-github-oauth-in-gitlab).
- Because of a [known issue](https://gitlab.com/gitlab-org/gitlab/-/issues/424400), Markdown attachments from
  repositories on GitHub Enterprise Server instances aren't imported.
- Because of a [known issue](https://gitlab.com/gitlab-org/gitlab/-/issues/418800), when importing projects that used
  [GitHub auto-merge](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/incorporating-changes-from-a-pull-request/automatically-merging-a-pull-request), the imported project in GitLab can have merge commits labeled "unverified" if the commit was signed with GitHub's internal GPG key.

## Import your GitHub repository into GitLab

@@ -36,14 +36,17 @@ GitLab Duo Code Suggestions are available:
</figure>

During Beta, usage of Code Suggestions is governed by the [GitLab Testing Agreement](https://about.gitlab.com/handbook/legal/testing-agreement/).
Learn about [data usage when using Code Suggestions](#code-suggestions-data-usage). As Code Suggestions matures to General Availibility it will be governed by our [AI Functionality Terms](https://about.gitlab.com/handbook/legal/ai-functionality-terms/).
Learn about [data usage when using Code Suggestions](#code-suggestions-data-usage). As Code Suggestions matures to General Availability it will be governed by our [AI Functionality Terms](https://about.gitlab.com/handbook/legal/ai-functionality-terms/).

## Use Code Suggestions

Prerequisites:

- Code Suggestions must be enabled for [SaaS](saas.md#enable-code-suggestions) or for [self-managed](self_managed.md#enable-code-suggestions-on-self-managed-gitlab).
- You must have installed and configured a [supported IDE editor extension](index.md#supported-editor-extensions).
- If you are a **SaaS** user, you must enable Code Suggestions for:
  - [The top-level group](../../../group/manage.md#enable-code-suggestions) (you must have the Owner role for that group).
  - [Your own account](../../../profile/preferences.md#enable-code-suggestions).
- If you are a **self-managed** user, you must enable Code Suggestions [for your instance](self_managed.md#enable-code-suggestions-on-self-managed-gitlab). How you enable Code Suggestions differs depending on your version of GitLab.

To use Code Suggestions:

@@ -5,7 +5,7 @@ group: Utilization
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
---

# Storage **(FREE ALL)**
# Storage **(FREE SAAS)**

All projects on GitLab SaaS have 10 GiB of free storage for their Git repository and Large File Storage (LFS).

@@ -16,13 +16,11 @@ you must [purchase more storage](../subscriptions/gitlab_com/index.md#purchase-m
GitLab plans to introduce storage limits for namespaces on GitLab SaaS. After these storage limits have been applied,
storage usage will be calculated across the entire namespace and project storage limits will no longer apply.

The amount of storage and transfer for self-managed instances has no application limits. Administrators are responsible for the underlying infrastructure costs and can set [repository size limits](../administration/settings/account_and_limit_settings.md#repository-size-limit).

## View storage
## View storage **(FREE ALL)**

You can view the following statistics for storage usage in projects and namespaces:

- Storage usage that exceeds the GitLab SaaS storage limit or [self-managed storage quota](../administration/settings/account_and_limit_settings.md#repository-size-limit).
- Storage usage that exceeds the GitLab SaaS storage limit or [self-managed storage limits](../administration/settings/account_and_limit_settings.md#repository-size-limit).
- Available purchased storage for GitLab SaaS.

Prerequisites:

@@ -49,7 +47,7 @@ NOTE:
Storage usage labels are being transitioned from `KB` to `KiB`, `MB` to `MiB`, and `GB` to `GiB`. During this transition,
you might see references to `KB`, `MB`, and `GB` in the UI and documentation.

### View project fork storage usage **(FREE SAAS)**
## View project fork storage usage

A cost factor is applied to the storage consumed by project forks so that forks consume less namespace storage than their actual size.
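
As a purely illustrative calculation (the actual cost factor value is not stated in this excerpt): with a hypothetical cost factor of 0.1, a fork whose repository consumes 5 GiB would count as only 5 GiB × 0.1 = 0.5 GiB toward the namespace storage total.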

@@ -63,7 +61,7 @@ The cost factor applies to the project repository, LFS objects, job artifacts, p

The cost factor does not apply to private forks in namespaces on the Free plan.

## Excess storage usage **(FREE SAAS)**
## Excess storage usage

Excess storage usage is the amount that exceeds the 10 GiB free storage of a project's repository and LFS. If no purchased storage is available,
the project is set to a read-only state. You cannot push changes to a read-only project.

@@ -109,7 +107,7 @@ available decreases. All projects no longer have the read-only status because 40
| Yellow | 5 GiB | 0 GiB | 10 GiB | Not read-only |
| **Totals** | **45 GiB** | **10 GiB** | - | - |

## Namespace storage limit **(FREE SAAS)**
## Namespace storage limit

GitLab plans to introduce the following storage limits per top-level group:

@@ -160,7 +158,7 @@ Storage types that count toward the total namespace storage are:
- Wiki
- Snippets

### Excess storage notifications **(FREE SAAS)**
### Excess storage notifications

Storage limits are included in GitLab subscription terms but do not apply. At least 60 days before GitLab introduces storage limits,
GitLab will notify you of namespaces that exceed, or are close to exceeding, the storage limit.

@@ -0,0 +1,14 @@
# frozen_string_literal: true

module API
  module Entities
    module Ml
      module Mlflow
        class ListRegisteredModels < Grape::Entity
          expose :registered_models, with: RegisteredModel, as: :registered_models
          expose :next_page_token
        end
      end
    end
  end
end

@@ -21,6 +21,10 @@ module API
          def last_updated_timestamp
            object.updated_at.to_i
          end

          def description
            object.description.to_s
          end
        end
      end
    end

@@ -4,6 +4,8 @@ module API
  module Ml
    module Mlflow
      module ApiHelpers
        OUTER_QUOTES_REGEXP = /^("|')|("|')?$/

        def check_api_read!
          not_found! unless can?(current_user, :read_model_experiments, user_project)
        end

@@ -83,6 +85,34 @@ module API
          }
        end

        def model_order_params(params)
          if params[:order_by].blank?
            order_by = 'name'
            sort = 'asc'
          else
            order_by, sort = params[:order_by].downcase.split(' ')
            order_by = 'updated_at' if order_by == 'last_updated_timestamp'
            sort ||= 'asc'
          end

          {
            order_by: order_by,
            sort: sort
          }
        end

        def model_filter_params(params)
          return {} if params[:filter].blank?

          param, filter = params[:filter].split('=')

          return {} unless param == 'name'

          filter.gsub!(OUTER_QUOTES_REGEXP, '') unless filter.blank?

          { name: filter }
        end

        def find_experiment!(iid, name)
          experiment_repository.by_iid_or_name(iid: iid, name: name) || resource_not_found!
        end
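
Reading the two helpers above, a quick sketch of the behaviour they encode (expected values derived from the code, not from running it):

```ruby
# MLflow's last_updated_timestamp column maps onto the finder's updated_at.
model_order_params(order_by: 'last_updated_timestamp DESC')
# => { order_by: 'updated_at', sort: 'desc' }

# A blank order_by falls back to name ascending.
model_order_params(order_by: '')
# => { order_by: 'name', sort: 'asc' }

# Only `name=` filters are honoured; outer quotes are stripped.
model_filter_params(filter: "name='my-model'")
# => { name: 'my-model' }
```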

@@ -120,6 +120,49 @@ module API
            render_api_error!('Model could not be deleted', 400)
          end
        end

        desc 'Search Registered Models within a project' do
          success Entities::Ml::Mlflow::RegisteredModel
          detail 'https://mlflow.org/docs/2.6.0/rest-api.html#search-registeredmodels'
        end
        params do
          optional :filter,
            type: String,
            desc: "Filter to search models. must be in the format `name='value'`. Only filtering by name is supported"
          optional :max_results,
            type: Integer,
            desc: 'Maximum number of models desired. Default is 200. Max threshold is 1000.',
            default: 200
          optional :order_by,
            type: String,
            desc: 'Order criteria. Can be by name or last_updated_timestamp, with optional DESC or ASC (default)' \
              'Valid examples: `name`, `name DESC`, `last_updated_timestamp DESC`' \
              'Sorting by model metadata is not supported.',
            default: 'name ASC'
          optional :page_token,
            type: String,
            desc: 'Token for pagination'
        end
        get 'search', urgency: :low do
          max_results = [params[:max_results], 1000].min

          finder_params = model_order_params(params)
          filter_params = model_filter_params(params)

          if !params[:filter].nil? && !filter_params.key?(:name)
            invalid_parameter!("Invalid attribute key specified. Valid keys are '{'name'}'")
          end

          finder = ::Projects::Ml::ModelFinder.new(user_project, finder_params.merge(filter_params))
          paginator = finder.execute.keyset_paginate(cursor: params[:page_token], per_page: max_results)

          result = {
            registered_models: paginator.records,
            next_page_token: paginator.cursor_for_next_page
          }

          present result, with: Entities::Ml::Mlflow::ListRegisteredModels
        end
      end
    end
  end
end
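
A hedged sketch of calling the new search endpoint from plain Ruby. The route shape matches the request spec later in this diff; the host, project ID, token, and Bearer-style authentication are assumptions to adapt for a real instance:

```ruby
require 'net/http'
require 'json'

# Hypothetical instance, project, and credentials.
uri = URI('https://gitlab.example.com/api/v4/projects/42/ml/mlflow/api/2.0/mlflow/registered-models/search')
uri.query = URI.encode_www_form(
  filter: "name='my-model'",               # only filtering by name is supported
  order_by: 'last_updated_timestamp DESC', # mapped to updated_at by model_order_params
  max_results: 50                          # capped server-side at 1000
)

request = Net::HTTP::Get.new(uri)
request['Authorization'] = 'Bearer <access-token>'

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
payload = JSON.parse(response.body)

payload['registered_models'] # matching models
payload['next_page_token']   # feed back as page_token to fetch the next page
```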

@@ -1,131 +0,0 @@
# frozen_string_literal: true

module BulkImports
  module Projects
    module Pipelines
      class LegacyReferencesPipeline
        include Pipeline

        BATCH_SIZE = 100

        def extract(_context)
          data = Enumerator.new do |enum|
            add_matching_objects(portable.issues, enum)
            add_matching_objects(portable.merge_requests, enum)
            add_notes(portable.issues, enum)
            add_notes(portable.merge_requests, enum)
          end

          BulkImports::Pipeline::ExtractedData.new(data: data)
        end

        def transform(_context, object)
          body = object_body(object).dup

          body.gsub!(username_regex(mapped_usernames), mapped_usernames)

          matching_urls(object).each do |old_url, new_url|
            body.gsub!(old_url, new_url) if body.include?(old_url)
          end

          object.assign_attributes(body_field(object) => body)

          object
        end

        def load(_context, object)
          object.save! if object_body_changed?(object)
        end

        private

        def mapped_usernames
          @mapped_usernames ||= ::BulkImports::UsersMapper.new(context: context)
            .map_usernames.transform_keys { |key| "@#{key}" }
            .transform_values { |value| "@#{value}" }
        end

        def username_regex(mapped_usernames)
          @username_regex ||= Regexp.new(mapped_usernames.keys.sort_by(&:length)
            .reverse.map { |x| Regexp.escape(x) }.join('|'))
        end

        def add_matching_objects(collection, enum)
          collection.each_batch(of: BATCH_SIZE, column: :iid) do |batch|
            batch.each do |object|
              enum << object if object_has_reference?(object) || object_has_username?(object)
            end
          end
        end

        def add_notes(collection, enum)
          collection.each_batch(of: BATCH_SIZE, column: :iid) do |batch|
            batch.each do |object|
              object.notes.each_batch(of: BATCH_SIZE) do |notes_batch|
                notes_batch.each do |note|
                  note.refresh_markdown_cache!
                  enum << note if object_has_reference?(note) || object_has_username?(note)
                end
              end
            end
          end
        end

        def object_has_reference?(object)
          object_body(object)&.include?(source_full_path)
        end

        def object_has_username?(object)
          return false unless object_body(object)

          mapped_usernames.keys.any? { |old_username| object_body(object).include?(old_username) }
        end

        def object_body(object)
          call_object_method(object)
        end

        def object_body_changed?(object)
          call_object_method(object, suffix: '_changed?')
        end

        def call_object_method(object, suffix: nil)
          method = body_field(object)
          method = "#{method}#{suffix}" if suffix.present?

          object.public_send(method) # rubocop:disable GitlabSecurity/PublicSend -- the method being called is dependent on several factors
        end

        def body_field(object)
          object.is_a?(Note) ? 'note' : 'description'
        end

        def matching_urls(object)
          URI.extract(object_body(object), %w[http https]).each_with_object([]) do |url, array|
            parsed_url = URI.parse(url)

            next unless source_host == parsed_url.host
            next unless parsed_url.path&.start_with?("/#{source_full_path}")

            array << [url, new_url(parsed_url)]
          end
        end

        def new_url(parsed_old_url)
          parsed_old_url.host = ::Gitlab.config.gitlab.host
          parsed_old_url.port = ::Gitlab.config.gitlab.port
          parsed_old_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
          parsed_old_url.to_s.gsub!(source_full_path, portable.full_path)
        end

        def source_host
          @source_host ||= URI.parse(context.configuration.url).host
        end

        def source_full_path
          context.entity.source_full_path
        end
      end
    end
  end
end

@@ -135,7 +135,7 @@ module BulkImports
          stage: 5
        },
        references: {
          pipeline: references_pipeline,
          pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline,
          stage: 5
        },
        finisher: {

@@ -144,14 +144,6 @@ module BulkImports
          }
        }
      end

      def references_pipeline
        if Feature.enabled?(:bulk_import_async_references_pipeline)
          BulkImports::Projects::Pipelines::ReferencesPipeline
        else
          BulkImports::Projects::Pipelines::LegacyReferencesPipeline
        end
      end
    end
  end
end

@@ -6818,12 +6818,6 @@ msgstr ""
msgid "At least one of group_id or project_id must be specified"
msgstr ""

msgid "At least one of your Personal Access Tokens is expired. %{generate_new}"
msgstr ""

msgid "At least one of your Personal Access Tokens will expire soon. %{generate_new}"
msgstr ""

msgid "At risk"
msgstr ""

@@ -39843,6 +39837,9 @@ msgstr ""
msgid "Release|Learn more about releases"
msgstr ""

msgid "Release|Leave blank to use the tag name as the release title."
msgstr ""

msgid "Release|More information"
msgstr ""

@@ -48664,6 +48661,9 @@ msgstr ""
msgid "The name of the Jenkins project. Copy the name from the end of the URL to the project."
msgstr ""

msgid "The namespace storage size (%{current_size}) exceeds the limit of %{size_limit} by %{exceeded_size}. You won't be able to push new code to this project. Please contact your GitLab administrator for more information."
msgstr ""

msgid "The number of changes to fetch from GitLab when cloning a repository. Lower values can speed up pipeline execution. Set to %{code_open}0%{code_close} or blank to fetch all branches and tags for each job"
msgstr ""

@@ -56341,6 +56341,9 @@ msgstr ""
msgid "Your name"
msgstr ""

msgid "Your namespace storage is full. This merge request cannot be merged. To continue, %{link_start}manage your storage usage%{link_end}."
msgstr ""

msgid "Your new %{accessTokenType}"
msgstr ""

@@ -14,7 +14,8 @@ RSpec.describe 'Database schema', feature_category: :database do
    # but in Search::NamespaceIndexAssignment model, only `search_index_id` is used as foreign key and indexed
    search_namespace_index_assignments: [%w[search_index_id index_type]],
    slack_integrations_scopes: [%w[slack_api_scope_id]],
    notes: %w[namespace_id] # this index is added in an async manner, hence it needs to be ignored in the first phase.
    notes: %w[namespace_id], # this index is added in an async manner, hence it needs to be ignored in the first phase.
    users: [%w[accepted_term_id]]
  }.with_indifferent_access.freeze

  TABLE_PARTITIONS = %w[ci_builds_metadata].freeze

@@ -9,6 +9,10 @@ RSpec.describe 'Environments Folder page', :js, feature_category: :environment_m
  let_it_be(:user) { create(:user) }
  let!(:envs) { create_list(:environment, 4, :with_folders, project: project, folder: folder_name) }

  def get_env_name(environment)
    environment.name.split('/').last
  end

  before_all do
    project.add_role(user, :developer)
  end

@@ -27,6 +31,11 @@ RSpec.describe 'Environments Folder page', :js, feature_category: :environment_m
    it 'renders the header with a folder name' do
      expect(page).to have_content("Environments / #{folder_name}")
    end

    it 'renders the environments' do
      expect(page).not_to have_content('production')
      envs.each { |env| expect(page).to have_content(get_env_name(env)) }
    end
  end

  describe 'legacy folders page' do

@@ -40,7 +49,7 @@ RSpec.describe 'Environments Folder page', :js, feature_category: :environment_m
    it 'user opens folder view' do
      expect(page).to have_content("Environments / #{folder_name}")
      expect(page).not_to have_content('production')
      envs.each { |env| expect(page).to have_content(env.name.split('/').last) }
      envs.each { |env| expect(page).to have_content(get_env_name(env)) }
    end
  end
end

@@ -6,7 +6,7 @@ RSpec.describe Projects::Ml::ModelFinder, feature_category: :mlops do
  let_it_be(:project) { create(:project) }
  let_it_be(:model1) { create(:ml_models, :with_versions, project: project) }
  let_it_be(:model2) { create(:ml_models, :with_versions, project: project) }
  let_it_be(:model3) { create(:ml_models, name: "#{model1.name}_1", project: project) }
  let_it_be(:model3) { create(:ml_models, name: "#{model1.name}_1", project: project, updated_at: 1.week.ago) }
  let_it_be(:other_model) { create(:ml_models) }
  let_it_be(:project_models) { [model1, model2, model3] }

@@ -52,6 +52,7 @@ RSpec.describe Projects::Ml::ModelFinder, feature_category: :mlops do
    'by column' | 'name' | 'ASC' | [0, 2, 1]
    'invalid sort' | nil | 'UP' | [2, 1, 0]
    'invalid order by' | 'INVALID' | nil | [2, 1, 0]
    'order by updated_at' | 'updated_at' | nil | [1, 0, 2]
  end
  with_them do
    let(:params) { { order_by: order_by, sort: direction } }

@@ -19,7 +19,7 @@
      "type": "string"
    },
    "user_id": {
      "type": "integer"
      "type": "string"
    },
    "creation_timestamp": {
      "type": "integer"

@@ -0,0 +1,53 @@
{
  "type": "object",
  "required": [
    "registered_models",
    "next_page_token"
  ],
  "properties": {
    "registered_models": {
      "type": "array",
      "items": {
        "type": "object",
        "required": [

        ],
        "properties": {
          "name": {
            "type": "string"
          },
          "description": {
            "type": "string"
          },
          "user_id": {
            "type": "string"
          },
          "creation_timestamp": {
            "type": "integer"
          },
          "last_updated_timestamp": {
            "type": "integer"
          },
          "tags": {
            "type": "array",
            "items": {
              "type": "object",
              "required": [
                "key",
                "value"
              ],
              "properties": {
                "key": {
                  "type": "string"
                },
                "value": {
                  "type": "string"
                }
              }
            }
          }
        }
      }
    }
  }
}

@@ -1,23 +1,80 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlSkeletonLoader } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import EnvironmentsFolderAppComponent from '~/environments/folder/environments_folder_app.vue';
import EnvironmentItem from '~/environments/components/new_environment_item.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { resolvedFolder } from '../graphql/mock_data';

Vue.use(VueApollo);

describe('EnvironmentsFolderAppComponent', () => {
  let wrapper;
  const mockFolderName = 'folders';

  const createWrapper = () => {
  let environmentFolderMock;

  const createApolloProvider = () => {
    const mockResolvers = {
      Query: {
        folder: environmentFolderMock,
      },
    };

    return createMockApollo([], mockResolvers);
  };

  beforeEach(() => {
    environmentFolderMock = jest.fn();
  });

  const emptyFolderData = {
    environments: [],
    activeCount: 0,
    stoppedCount: 0,
    __typename: 'LocalEnvironmentFolder',
  };

  const createWrapper = ({ folderData } = {}) => {
    environmentFolderMock.mockReturnValue(folderData || emptyFolderData);

    const apolloProvider = createApolloProvider();

    wrapper = shallowMountExtended(EnvironmentsFolderAppComponent, {
      apolloProvider,
      propsData: {
        folderName: mockFolderName,
        folderPath: '/gitlab-org/test-project/-/environments/folder/dev',
      },
    });
  };

  const findHeader = () => wrapper.findByTestId('folder-name');
  const findEnvironmentItems = () => wrapper.findAllComponents(EnvironmentItem);
  const findSkeletonLoaders = () => wrapper.findAllComponents(GlSkeletonLoader);

  it('should render a header with the folder name', () => {
    createWrapper();

    expect(findHeader().text()).toMatchInterpolatedText(`Environments / ${mockFolderName}`);
  });

  it('should show skeletons while loading', () => {
    createWrapper();
    expect(findSkeletonLoaders().length).toBe(3);
  });

  describe('when environments are loaded', () => {
    beforeEach(async () => {
      createWrapper({ folderData: resolvedFolder });
      await waitForPromises();
    });

    it('should list environments in folder', () => {
      const items = findEnvironmentItems();
      expect(items.length).toBe(resolvedFolder.environments.length);
    });
  });
});

@@ -61,4 +61,47 @@ RSpec.describe API::Ml::Mlflow::ApiHelpers, feature_category: :mlops do
      end
    end
  end

  describe '#model_order_params' do
    using RSpec::Parameterized::TableSyntax

    subject { model_order_params(params) }

    where(:input, :order_by, :sort) do
      '' | 'name' | 'asc'
      'name' | 'name' | 'asc'
      'name DESC' | 'name' | 'desc'
      'last_updated_timestamp' | 'updated_at' | 'asc'
      'last_updated_timestamp asc' | 'updated_at' | 'asc'
      'last_updated_timestamp DESC' | 'updated_at' | 'desc'
    end
    with_them do
      let(:params) { { order_by: input } }

      it 'is correct' do
        is_expected.to include({ order_by: order_by, sort: sort })
      end
    end
  end

  describe '#model_filter_params' do
    using RSpec::Parameterized::TableSyntax

    subject { model_filter_params(params) }

    where(:input, :output) do
      '' | {}
      'name=""' | { name: '' }
      'name=foo' | { name: 'foo' }
      'name="foo"' | { name: 'foo' }
      'invalid="foo"' | {}
    end
    with_them do
      let(:params) { { filter: input } }

      it 'is correct' do
        is_expected.to eq(output)
      end
    end
  end
end

@@ -1,268 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::LegacyReferencesPipeline, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project) }
  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
  let_it_be(:config) { create(:bulk_import_configuration, bulk_import: bulk_import, url: 'https://my.gitlab.com') }
  let_it_be(:entity) do
    create(
      :bulk_import_entity,
      :project_entity,
      project: project,
      bulk_import: bulk_import,
      source_full_path: 'source/full/path'
    )
  end

  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
  let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
  let(:issue) { create(:issue, project: project, description: 'https://my.gitlab.com/source/full/path/-/issues/1') }
  let(:mr) do
    create(
      :merge_request,
      source_project: project,
      description: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @source_username? @bob, @alice!'
    )
  end

  let(:issue_note) do
    create(
      :note,
      project: project,
      noteable: issue,
      note: 'https://my.gitlab.com/source/full/path/-/issues/1 @older_username, not_a@username, and @old_username.'
    )
  end

  let(:mr_note) do
    create(
      :note,
      project: project,
      noteable: mr,
      note: 'https://my.gitlab.com/source/full/path/-/merge_requests/1 @same_username'
    )
  end

  let(:interchanged_usernames) do
    create(
      :note,
      project: project,
      noteable: mr,
      note: '@manuelgrabowski-admin, @boaty-mc-boatface'
    )
  end

  let(:old_note_html) { 'old note_html' }
  let(:system_note) do
    create(
      :note,
      project: project,
      system: true,
      noteable: issue,
      note: "mentioned in merge request !#{mr.iid} created by @old_username",
      note_html: old_note_html
    )
  end

  let(:username_system_note) do
    create(
      :note,
      project: project,
      system: true,
      noteable: issue,
      note: "mentioned in merge request created by @source_username.",
      note_html: 'empty'
    )
  end

  subject(:pipeline) { described_class.new(context) }

  before do
    project.add_owner(user)

    allow(Gitlab::Cache::Import::Caching)
      .to receive(:values_from_hash)
      .and_return({
        'old_username' => 'new_username',
        'older_username' => 'newer_username',
        'source_username' => 'destination_username',
        'bob' => 'alice-gdk',
        'alice' => 'bob-gdk',
        'manuelgrabowski' => 'manuelgrabowski-admin',
        'manuelgrabowski-admin' => 'manuelgrabowski',
        'boaty-mc-boatface' => 'boatymcboatface',
        'boatymcboatface' => 'boaty-mc-boatface'
      })
  end

  def create_project_data
    [issue, mr, issue_note, mr_note, system_note, username_system_note]
  end

  def create_username_project_data
    [username_system_note]
  end

  describe '#extract' do
    it 'returns ExtractedData containing issues, mrs & their notes' do
      create_project_data

      extracted_data = subject.extract(context)

      expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
      expect(extracted_data.data).to contain_exactly(issue, mr, issue_note, system_note, username_system_note, mr_note)
      expect(system_note.note_html).not_to eq(old_note_html)
      expect(system_note.note_html)
        .to include("class=\"gfm gfm-merge_request\">!#{mr.iid}</a>")
        .and include(project.full_path.to_s)
        .and include("@old_username")
      expect(username_system_note.note_html)
        .to include("@source_username")
    end

    context 'when object body is nil' do
      let(:issue) { create(:issue, project: project, description: nil) }

      it 'returns ExtractedData not containing the object' do
        extracted_data = subject.extract(context)

        expect(extracted_data.data).to contain_exactly(issue_note, mr, mr_note)
      end
    end
  end

  describe '#transform', :clean_gitlab_redis_cache do
    it 'updates matching urls and usernames with new ones' do
      transformed_mr = subject.transform(context, mr)
      transformed_note = subject.transform(context, mr_note)
      transformed_issue = subject.transform(context, issue)
      transformed_issue_note = subject.transform(context, issue_note)
      transformed_system_note = subject.transform(context, system_note)
      transformed_username_system_note = subject.transform(context, username_system_note)

      expected_url = URI('')
      expected_url.scheme = ::Gitlab.config.gitlab.https ? 'https' : 'http'
      expected_url.host = ::Gitlab.config.gitlab.host
      expected_url.port = ::Gitlab.config.gitlab.port
      expected_url.path = "/#{project.full_path}/-/merge_requests/#{mr.iid}"

      expect(transformed_issue_note.note).not_to include("@older_username")
      expect(transformed_mr.description).not_to include("@source_username")
      expect(transformed_system_note.note).not_to include("@old_username")
      expect(transformed_username_system_note.note).not_to include("@source_username")

      expect(transformed_issue.description)
        .to eq("http://localhost:80/#{transformed_issue.namespace.full_path}/-/issues/1")
      expect(transformed_mr.description).to eq("#{expected_url} @destination_username? @alice-gdk, @bob-gdk!")
      expect(transformed_note.note).to eq("#{expected_url} @same_username")
      expect(transformed_issue_note.note).to include("@newer_username, not_a@username, and @new_username.")
      expect(transformed_system_note.note).to eq("mentioned in merge request !#{mr.iid} created by @new_username")
      expect(transformed_username_system_note.note).to include("@destination_username.")
    end

    it 'handles situations where old usernames are substrings of new usernames' do
      transformed_mr = subject.transform(context, mr)

      expect(transformed_mr.description).to include("@alice-gdk")
      expect(transformed_mr.description).not_to include("@bob-gdk-gdk")
    end

    it 'handles situations where old and new usernames are interchanged' do
      # e.g
      # |------------------------|-------------------------|
      # | old_username           | new_username            |
      # |------------------------|-------------------------|
      # | @manuelgrabowski-admin | @manuelgrabowski        |
      # | @manuelgrabowski       | @manuelgrabowski-admin  |
      # |------------------------|-------------------------|

      transformed_interchanged_usernames = subject.transform(context, interchanged_usernames)

      expect(transformed_interchanged_usernames.note).to include("@manuelgrabowski")
      expect(transformed_interchanged_usernames.note).to include("@boatymcboatface")
      expect(transformed_interchanged_usernames.note).not_to include("@manuelgrabowski-admin")
      expect(transformed_interchanged_usernames.note).not_to include("@boaty-mc-boatface")
    end

    context 'when object does not have reference or username' do
      it 'returns object unchanged' do
        issue.update!(description: 'foo')

        transformed_issue = subject.transform(context, issue)

        expect(transformed_issue.description).to eq('foo')
      end
    end

    context 'when there are not matched urls or usernames' do
      let(:description) { 'https://my.gitlab.com/another/project/path/-/issues/1 @random_username' }

      shared_examples 'returns object unchanged' do
        it 'returns object unchanged' do
          issue.update!(description: description)

          transformed_issue = subject.transform(context, issue)

          expect(transformed_issue.description).to eq(description)
        end
      end

      include_examples 'returns object unchanged'

      context 'when url path does not start with source full path' do
        let(:description) { 'https://my.gitlab.com/another/source/full/path/-/issues/1' }

        include_examples 'returns object unchanged'
      end

      context 'when host does not match and url path starts with source full path' do
        let(:description) { 'https://another.gitlab.com/source/full/path/-/issues/1' }

        include_examples 'returns object unchanged'
      end

      context 'when url does not match at all' do
        let(:description) { 'https://website.example/foo/bar' }

        include_examples 'returns object unchanged'
      end
    end
  end

  describe '#load' do
    it 'saves the object when object body changed' do
      transformed_issue = subject.transform(context, issue)
      transformed_note = subject.transform(context, mr_note)
      transformed_mr = subject.transform(context, mr)
      transformed_issue_note = subject.transform(context, issue_note)
      transformed_system_note = subject.transform(context, system_note)

      expect(transformed_issue).to receive(:save!)
      expect(transformed_note).to receive(:save!)
      expect(transformed_mr).to receive(:save!)
      expect(transformed_issue_note).to receive(:save!)
      expect(transformed_system_note).to receive(:save!)

      subject.load(context, transformed_issue)
      subject.load(context, transformed_note)
      subject.load(context, transformed_mr)
      subject.load(context, transformed_issue_note)
      subject.load(context, transformed_system_note)
    end

    context 'when object body is not changed' do
      it 'does not save the object' do
        expect(mr).not_to receive(:save!)
        expect(mr_note).not_to receive(:save!)
        expect(system_note).not_to receive(:save!)

        subject.load(context, mr)
        subject.load(context, mr_note)
        subject.load(context, system_note)
      end
    end
  end
end

@@ -21,24 +21,6 @@ RSpec.describe BulkImports::Projects::Stage, feature_category: :importers do
    expect(pipelines.last).to match(hash_including({ pipeline: BulkImports::Common::Pipelines::EntityFinisher }))
  end

  context 'when bulk_import_async_references_pipeline feature flag is disabled' do
    before do
      stub_feature_flags(bulk_import_async_references_pipeline: false)
    end

    it 'uses the legacy references pipeline' do
      pipelines = subject.pipelines

      expect(pipelines).to include(
        hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::LegacyReferencesPipeline })
      )

      expect(pipelines).not_to include(
        hash_including({ stage: 5, pipeline: BulkImports::Projects::Pipelines::ReferencesPipeline })
      )
    end
  end

  it 'only have pipelines with valid keys' do
    pipeline_keys = subject.pipelines.collect(&:keys).flatten.uniq
    allowed_keys = %i[pipeline stage minimum_source_version maximum_source_version]

@@ -76,7 +76,7 @@ RSpec.describe Gitlab::ErrorTracking, feature_category: :shared do
  end

  after do
    Sentry.get_current_scope.clear
    clear_sentry_settings
  end

  describe '.track_and_raise_for_dev_exception' do
|
||||
|
|
|
|||
|
|
@@ -55,8 +55,7 @@ RSpec.describe Gitlab::HTTP, feature_category: :shared do
     end
 
     context 'when there is a DB call in the concurrent thread' do
-      it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error',
-        quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/432145' do
+      it 'raises Gitlab::Utils::ConcurrentRubyThreadIsUsedError error' do
         stub_request(:get, 'http://example.org').to_return(status: 200, body: 'hello world')
 
         result = described_class.get('http://example.org', async: true) do |_fragment|
@@ -320,6 +320,10 @@ RSpec.describe API::Helpers, :enable_admin_mode, feature_category: :system_acces
       Gitlab::ErrorTracking.configure
     end
 
+    after do
+      clear_sentry_settings
+    end
+
     it 'does not report a MethodNotAllowed exception to Sentry' do
       exception = Grape::Exceptions::MethodNotAllowed.new({ 'X-GitLab-Test' => '1' })
       allow(exception).to receive(:backtrace).and_return(caller)
@@ -240,4 +240,44 @@ RSpec.describe API::Ml::Mlflow::RegisteredModels, feature_category: :mlops do
       it_behaves_like 'MLflow|a read/write model registry resource'
     end
   end
+
+  describe 'GET /projects/:id/ml/mlflow/api/2.0/mlflow/registered-models/search' do
+    let_it_be(:model2) do
+      create(:ml_models, :with_metadata, project: project)
+    end
+
+    let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search" }
+
+    it 'returns all the models', :aggregate_failures do
+      is_expected.to have_gitlab_http_status(:ok)
+      is_expected.to match_response_schema('ml/list_models')
+      expect(json_response["registered_models"].count).to be(2)
+    end
+
+    context "with a valid filter supplied" do
+      let(:filter) { "name='#{model2.name}'" }
+      let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search?filter=#{filter}" }
+
+      it 'returns only the models for the given filter' do
+        is_expected.to have_gitlab_http_status(:ok)
+        expect(json_response["registered_models"].count).to be(1)
+      end
+    end
+
+    context "with an invalid filter supplied" do
+      let(:filter) { "description='foo'" }
+      let(:route) { "/projects/#{project_id}/ml/mlflow/api/2.0/mlflow/registered-models/search?filter=#{filter}" }
+
+      it 'returns an error' do
+        is_expected.to have_gitlab_http_status(:bad_request)
+
+        expect(json_response).to include({ 'error_code' => 'INVALID_PARAMETER_VALUE' })
+      end
+    end
+
+    describe 'Error States' do
+      it_behaves_like 'MLflow|an authenticated resource'
+      it_behaves_like 'MLflow|a read-only model registry resource'
+    end
+  end
 end
@@ -120,6 +120,12 @@ module StubConfiguration
       .to receive(:sentry_clientside_dsn) { clientside_dsn }
   end
 
+  def clear_sentry_settings
+    ::Sentry.get_current_scope.clear
+    ::Sentry.close
+    ::Raven.configuration = ::Raven::Configuration.new
+  end
+
   def stub_microsoft_graph_mailer_setting(messages)
     allow(Gitlab.config.microsoft_graph_mailer).to receive_messages(to_settings(messages))
   end
@@ -65,5 +65,17 @@ RSpec.shared_examples Gitlab::BitbucketServerImport::ObjectImporter do
 
       it_behaves_like 'notifies the waiter'
     end
+
+    context 'when project import has failed' do
+      let_it_be(:project_id) { create(:project, :import_failed).id }
+
+      it 'does not call the importer' do
+        expect_next(worker.importer_class).not_to receive(:execute)
+
+        worker.perform(project_id, {}, waiter_key)
+      end
+
+      it_behaves_like 'notifies the waiter'
+    end
   end
 end
@@ -21,11 +21,11 @@ type KeyWatcher struct {
	subscribers      map[string][]chan string
	shutdown         chan struct{}
	reconnectBackoff backoff.Backoff
-	redisConn        *redis.Client
+	redisConn        *redis.Client // can be nil
	conn             *redis.PubSub
 }
 
-func NewKeyWatcher() *KeyWatcher {
+func NewKeyWatcher(redisConn *redis.Client) *KeyWatcher {
	return &KeyWatcher{
		shutdown: make(chan struct{}),
		reconnectBackoff: backoff.Backoff{
@@ -34,6 +34,7 @@ func NewKeyWatcher() *KeyWatcher {
			Factor: 2,
			Jitter: true,
		},
+		redisConn: redisConn,
	}
 }
 
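Aside: the change above moves the Redis client from a field assigned later inside Process to a constructor argument, with the explicit note that it can be nil. A minimal, self-contained sketch of that injection pattern follows; the watcher type and helper names are illustrative, not workhorse code.

package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/redis/go-redis/v9"
)

// watcher mirrors the KeyWatcher shape after the change: the client is
// injected once, at construction, and may be nil when Redis is not configured.
type watcher struct {
	rdb *redis.Client // can be nil
}

func newWatcher(rdb *redis.Client) *watcher {
	return &watcher{rdb: rdb}
}

// ping guards the nil case explicitly instead of relying on a late assignment.
func (w *watcher) ping(ctx context.Context) error {
	if w.rdb == nil {
		return errors.New("watcher: no redis client")
	}
	return w.rdb.Ping(ctx).Err()
}

func main() {
	rdb := redis.NewClient(&redis.Options{Addr: "localhost:6379"}) // assumed local Redis
	defer rdb.Close()

	w := newWatcher(rdb)
	fmt.Println(w.ping(context.Background()))
}

Injecting the client at construction removes the need for the mutex-guarded late assignment the old Process performed.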
@@ -125,16 +126,13 @@ func (kw *KeyWatcher) receivePubSubStream(ctx context.Context, pubsub *redis.Pub
	}
 }
 
-func (kw *KeyWatcher) Process(client *redis.Client) {
+func (kw *KeyWatcher) Process() {
	log.Info("keywatcher: starting process loop")
 
	ctx := context.Background() // lint:allow context.Background
-	kw.mu.Lock()
-	kw.redisConn = client
-	kw.mu.Unlock()
 
	for {
-		pubsub := client.Subscribe(ctx, []string{}...)
+		pubsub := kw.redisConn.Subscribe(ctx, []string{}...)
		if err := pubsub.Ping(ctx); err != nil {
			log.WithError(fmt.Errorf("keywatcher: %v", err)).Error()
			time.Sleep(kw.reconnectBackoff.Duration())
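The new Process body reads the injected connection directly. The subscribe-ping-backoff loop it keeps can be sketched in isolation roughly like this; a fixed sleep stands in for workhorse's jittered backoff, and all names are illustrative.

package main

import (
	"context"
	"log"
	"time"

	"github.com/redis/go-redis/v9"
)

// processLoop condenses the pattern from Process: subscribe, verify the
// connection with a ping, and back off before retrying on failure.
func processLoop(ctx context.Context, rdb *redis.Client) {
	for {
		pubsub := rdb.Subscribe(ctx)
		if err := pubsub.Ping(ctx); err != nil {
			log.Printf("keywatcher: %v", err)
			pubsub.Close()
			time.Sleep(time.Second) // fixed sleep; workhorse uses a jittered backoff
			continue
		}

		// Consume messages until the channel closes, then reconnect.
		for msg := range pubsub.Channel() {
			log.Printf("received %s: %s", msg.Channel, msg.Payload)
		}
		pubsub.Close()
	}
}

func main() {
	rdb := redis.NewClient(&redis.Options{Addr: "localhost:6379"}) // assumed local Redis
	defer rdb.Close()
	processLoop(context.Background(), rdb)
}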
@@ -7,6 +7,8 @@ import (
	"testing"
	"time"
 
+	"github.com/redis/go-redis/v9"
+	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
 
	"gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
@@ -18,24 +20,28 @@ const (
	runnerKey = "runner:build_queue:10"
 )
 
-func initRdb() {
-	buf, _ := os.ReadFile("../../config.toml")
-	cfg, _ := config.LoadConfig(string(buf))
-	Configure(cfg.Redis)
+func initRdb(t *testing.T) *redis.Client {
+	buf, err := os.ReadFile("../../config.toml")
+	require.NoError(t, err)
+	cfg, err := config.LoadConfig(string(buf))
+	require.NoError(t, err)
+	rdb, err := Configure(cfg.Redis)
+	require.NoError(t, err)
+	t.Cleanup(func() {
+		assert.NoError(t, rdb.Close())
+	})
+	return rdb
 }
 
-func (kw *KeyWatcher) countSubscribers(key string) int {
+func countSubscribers(kw *KeyWatcher, key string) int {
	kw.mu.Lock()
	defer kw.mu.Unlock()
	return len(kw.subscribers[key])
 }
 
 // Forces a run of the `Process` loop against a mock PubSubConn.
-func (kw *KeyWatcher) processMessages(t *testing.T, numWatchers int, value string, ready chan<- struct{}, wg *sync.WaitGroup) {
-	kw.mu.Lock()
-	kw.redisConn = rdb
+func processMessages(t *testing.T, kw *KeyWatcher, numWatchers int, value string, ready chan<- struct{}, wg *sync.WaitGroup) {
	psc := kw.redisConn.Subscribe(ctx, []string{}...)
-	kw.mu.Unlock()
 
	errC := make(chan error)
	go func() { errC <- kw.receivePubSubStream(ctx, psc) }()
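The reworked initRdb illustrates a common Go test-fixture shape: build the dependency, register teardown with t.Cleanup, and return the value to the test. A generic sketch of that shape under stated assumptions (the helper name and the localhost address are placeholders):

package example

import (
	"testing"

	"github.com/redis/go-redis/v9"
	"github.com/stretchr/testify/assert"
)

// newTestClient follows the initRdb(t) shape: construct the dependency,
// register teardown via t.Cleanup, return the value.
func newTestClient(t *testing.T) *redis.Client {
	t.Helper()

	rdb := redis.NewClient(&redis.Options{Addr: "localhost:6379"}) // assumed local Redis
	t.Cleanup(func() {
		assert.NoError(t, rdb.Close())
	})
	return rdb
}

func TestPingExample(t *testing.T) {
	rdb := newTestClient(t)
	_ = rdb // the client is closed automatically when the test finishes
}

Compared with a package-level helper that mutates a global, t.Cleanup ties the teardown to the test that created the client, which is what lets the tests below drop the shared rdb variable.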
@@ -48,7 +54,7 @@ func (kw *KeyWatcher) processMessages(t *testing.T, numWatchers int, value strin
	close(ready)
 
	require.Eventually(t, func() bool {
-		return kw.countSubscribers(runnerKey) == numWatchers
+		return countSubscribers(kw, runnerKey) == numWatchers
	}, time.Second, time.Millisecond)
 
	// send message after listeners are ready
@@ -74,7 +80,7 @@ type keyChangeTestCase struct {
 }
 
 func TestKeyChangesInstantReturn(t *testing.T) {
-	initRdb()
+	rdb := initRdb(t)
 
	testCases := []keyChangeTestCase{
		// WatchKeyStatusAlreadyChanged
@@ -118,13 +124,10 @@ func TestKeyChangesInstantReturn(t *testing.T) {
			rdb.Set(ctx, runnerKey, tc.returnValue, 0)
		}
 
-		defer func() {
-			rdb.FlushDB(ctx)
-		}()
+		defer rdb.FlushDB(ctx)
 
-		kw := NewKeyWatcher()
+		kw := NewKeyWatcher(rdb)
		defer kw.Shutdown()
-		kw.redisConn = rdb
		kw.conn = kw.redisConn.Subscribe(ctx, []string{}...)
 
		val, err := kw.WatchKey(ctx, runnerKey, tc.watchValue, tc.timeout)
@@ -136,7 +139,7 @@ func TestKeyChangesInstantReturn(t *testing.T) {
 }
 
 func TestKeyChangesWhenWatching(t *testing.T) {
-	initRdb()
+	rdb := initRdb(t)
 
	testCases := []keyChangeTestCase{
		// WatchKeyStatusSeenChange
@@ -170,11 +173,9 @@ func TestKeyChangesWhenWatching(t *testing.T) {
			rdb.Set(ctx, runnerKey, tc.returnValue, 0)
		}
 
-		kw := NewKeyWatcher()
+		kw := NewKeyWatcher(rdb)
		defer kw.Shutdown()
-		defer func() {
-			rdb.FlushDB(ctx)
-		}()
+		defer rdb.FlushDB(ctx)
 
		wg := &sync.WaitGroup{}
		wg.Add(1)
@@ -189,13 +190,13 @@ func TestKeyChangesWhenWatching(t *testing.T) {
			require.Equal(t, tc.expectedStatus, val, "Expected value")
		}()
 
-		kw.processMessages(t, 1, tc.processedValue, ready, wg)
+		processMessages(t, kw, 1, tc.processedValue, ready, wg)
		})
	}
 }
 
 func TestKeyChangesParallel(t *testing.T) {
-	initRdb()
+	rdb := initRdb(t)
 
	testCases := []keyChangeTestCase{
		{
@@ -222,15 +223,13 @@ func TestKeyChangesParallel(t *testing.T) {
			rdb.Set(ctx, runnerKey, tc.returnValue, 0)
		}
 
-		defer func() {
-			rdb.FlushDB(ctx)
-		}()
+		defer rdb.FlushDB(ctx)
 
		wg := &sync.WaitGroup{}
		wg.Add(runTimes)
		ready := make(chan struct{})
 
-		kw := NewKeyWatcher()
+		kw := NewKeyWatcher(rdb)
		defer kw.Shutdown()
 
		for i := 0; i < runTimes; i++ {
@@ -244,16 +243,15 @@ func TestKeyChangesParallel(t *testing.T) {
			}()
		}
 
-		kw.processMessages(t, runTimes, tc.processedValue, ready, wg)
+		processMessages(t, kw, runTimes, tc.processedValue, ready, wg)
		})
	}
 }
 
 func TestShutdown(t *testing.T) {
-	initRdb()
+	rdb := initRdb(t)
 
-	kw := NewKeyWatcher()
-	kw.redisConn = rdb
+	kw := NewKeyWatcher(rdb)
	kw.conn = kw.redisConn.Subscribe(ctx, []string{}...)
	defer kw.Shutdown()
 
@@ -272,14 +270,14 @@ func TestShutdown(t *testing.T) {
 
	go func() {
		defer wg.Done()
-		require.Eventually(t, func() bool { return kw.countSubscribers(runnerKey) == 1 }, 10*time.Second, time.Millisecond)
+		require.Eventually(t, func() bool { return countSubscribers(kw, runnerKey) == 1 }, 10*time.Second, time.Millisecond)
 
		kw.Shutdown()
	}()
 
	wg.Wait()
 
-	require.Eventually(t, func() bool { return kw.countSubscribers(runnerKey) == 0 }, 10*time.Second, time.Millisecond)
+	require.Eventually(t, func() bool { return countSubscribers(kw, runnerKey) == 0 }, 10*time.Second, time.Millisecond)
 
	// Adding a key after the shutdown should result in an immediate response
	var val WatchKeyStatus
@@ -17,7 +17,6 @@ import (
 )
 
 var (
-	rdb *redis.Client
	// found in https://github.com/redis/go-redis/blob/c7399b6a17d7d3e2a57654528af91349f2468529/sentinel.go#L626
	errSentinelMasterAddr error = errors.New("redis: all sentinels specified in configuration are unreachable")
@@ -129,16 +128,13 @@ func (s sentinelInstrumentationHook) ProcessPipelineHook(next redis.ProcessPipel
	}
 }
 
-func GetRedisClient() *redis.Client {
-	return rdb
-}
-
 // Configure redis-connection
-func Configure(cfg *config.RedisConfig) error {
+func Configure(cfg *config.RedisConfig) (*redis.Client, error) {
	if cfg == nil {
-		return nil
+		return nil, nil
	}
 
+	var rdb *redis.Client
	var err error
 
	if len(cfg.Sentinel) > 0 {
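The Configure change swaps a package-global plus a GetRedisClient accessor for a plain return value, with (nil, nil) meaning "no Redis configured". Sketched generically below; the redisConfig type is a stand-in, not workhorse's config.RedisConfig.

package main

import (
	"errors"
	"log"

	"github.com/redis/go-redis/v9"
)

// redisConfig is a stand-in for workhorse's config.RedisConfig.
type redisConfig struct {
	addr string
}

// configure returns the client to the caller instead of stashing it in a
// package-level variable; (nil, nil) means Redis is simply not configured.
func configure(cfg *redisConfig) (*redis.Client, error) {
	if cfg == nil {
		return nil, nil
	}
	if cfg.addr == "" {
		return nil, errors.New("redis address missing")
	}
	return redis.NewClient(&redis.Options{Addr: cfg.addr}), nil
}

func main() {
	rdb, err := configure(&redisConfig{addr: "localhost:6379"}) // assumed local Redis
	if err != nil {
		log.Fatalf("configure: %v", err)
	}
	if rdb != nil {
		defer rdb.Close()
	}
}

Returning the client makes ownership explicit: the caller decides who holds it, closes it, and whether a nil client is acceptable, rather than every package reaching for a hidden global.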
@@ -147,7 +143,7 @@ func Configure(cfg *config.RedisConfig) (*redis.Client, error) {
		rdb, err = configureRedis(cfg)
	}
 
-	return err
+	return rdb, err
 }
 
 func configureRedis(cfg *config.RedisConfig) (*redis.Client, error) {
@@ -29,8 +29,8 @@ func mockRedisServer(t *testing.T, connectReceived *atomic.Value) string {
 }
 
 func TestConfigureNoConfig(t *testing.T) {
-	rdb = nil
-	Configure(nil)
+	rdb, err := Configure(nil)
+	require.NoError(t, err)
	require.Nil(t, rdb, "rdb client should be nil")
 }
 
@@ -57,15 +57,15 @@ func TestConfigureValidConfigX(t *testing.T) {
			parsedURL := helper.URLMustParse(tc.scheme + "://" + a)
			cfg := &config.RedisConfig{URL: config.TomlURL{URL: *parsedURL}}
 
-			Configure(cfg)
+			rdb, err := Configure(cfg)
+			require.NoError(t, err)
+			defer rdb.Close()
 
-			require.NotNil(t, GetRedisClient().Conn(), "Pool should not be nil")
+			require.NotNil(t, rdb.Conn(), "Pool should not be nil")
 
			// goredis initialise connections lazily
			rdb.Ping(context.Background())
			require.True(t, connectReceived.Load().(bool))
-
-			rdb = nil
		})
	}
 }
@@ -96,15 +96,15 @@ func TestConnectToSentinel(t *testing.T) {
			}
 
			cfg := &config.RedisConfig{Sentinel: sentinelUrls}
-			Configure(cfg)
+			rdb, err := Configure(cfg)
+			require.NoError(t, err)
+			defer rdb.Close()
 
-			require.NotNil(t, GetRedisClient().Conn(), "Pool should not be nil")
+			require.NotNil(t, rdb.Conn(), "Pool should not be nil")
 
			// goredis initialise connections lazily
			rdb.Ping(context.Background())
			require.True(t, connectReceived.Load().(bool))
-
-			rdb = nil
		})
	}
 }
@@ -225,13 +225,14 @@ func run(boot bootConfig, cfg config.Config) error {
 
	log.Info("Using redis/go-redis")
 
-	redisKeyWatcher := redis.NewKeyWatcher()
-	if err := redis.Configure(cfg.Redis); err != nil {
+	rdb, err := redis.Configure(cfg.Redis)
+	if err != nil {
		log.WithError(err).Error("unable to configure redis client")
	}
+	redisKeyWatcher := redis.NewKeyWatcher(rdb)
 
-	if rdb := redis.GetRedisClient(); rdb != nil {
-		go redisKeyWatcher.Process(rdb)
+	if rdb != nil {
+		go redisKeyWatcher.Process()
	}
 
	watchKeyFn := redisKeyWatcher.WatchKey
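Taken together, run() now follows a configure-inject-start order and tolerates a nil client when Redis is absent. A rough sketch of that wiring, with placeholder types and names rather than the real workhorse ones:

package main

import (
	"log"

	"github.com/redis/go-redis/v9"
)

// keyWatcher is a placeholder for the real watcher; only the wiring matters.
type keyWatcher struct{ rdb *redis.Client }

func newKeyWatcher(rdb *redis.Client) *keyWatcher { return &keyWatcher{rdb: rdb} }

func (kw *keyWatcher) process() { /* the pub/sub loop would run here */ }

func configure(addr string) (*redis.Client, error) {
	if addr == "" {
		return nil, nil // Redis is optional
	}
	return redis.NewClient(&redis.Options{Addr: addr}), nil
}

func main() {
	rdb, err := configure("localhost:6379") // assumed local Redis
	if err != nil {
		log.Printf("unable to configure redis client: %v", err)
	}

	kw := newKeyWatcher(rdb) // the watcher is constructed either way
	if rdb != nil {
		go kw.process() // the loop only starts when Redis is configured
	}

	// ... start HTTP servers etc.; select{} stands in for the real server loop
	select {}
}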