Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 8efd9d502c
commit 1f3fdb1a6b

@@ -48,7 +48,7 @@ build-gdk-image:
     - .base-image-build-buildx
     - .build-images:rules:build-gdk-image
   tags:
-    - docker
+    - $GITLAB_LARGE_RUNNER_OPTIONAL
   stage: build-images
   needs:
     - job: gdk:compile-test-assets

Gemfile
@@ -602,7 +602,7 @@ group :test do
   # Moved in `test` because https://gitlab.com/gitlab-org/gitlab/-/issues/217527
   gem 'derailed_benchmarks', require: false, feature_category: :shared

-  gem 'gitlab_quality-test_tooling', '~> 2.4.0', require: false, feature_category: :tooling
+  gem 'gitlab_quality-test_tooling', '~> 2.8.0', require: false, feature_category: :tooling
 end

 gem 'octokit', '~> 9.0', feature_category: :importers

@@ -243,7 +243,7 @@
 {"name":"gitlab-styles","version":"13.1.0","platform":"ruby","checksum":"46c7c5729616355868b7b40a4ffcd052b36346076042abe8cafaee1688cbf2c1"},
 {"name":"gitlab_chronic_duration","version":"0.12.0","platform":"ruby","checksum":"0d766944d415b5c831f176871ee8625783fc0c5bfbef2d79a3a616f207ffc16d"},
 {"name":"gitlab_omniauth-ldap","version":"2.2.0","platform":"ruby","checksum":"bb4d20acb3b123ed654a8f6a47d3fac673ece7ed0b6992edb92dca14bad2838c"},
-{"name":"gitlab_quality-test_tooling","version":"2.4.0","platform":"ruby","checksum":"2e4c12528b05253f1b9c9cef49f4f545d4aab94bf28ca337611c795b2cfb36a0"},
+{"name":"gitlab_quality-test_tooling","version":"2.8.0","platform":"ruby","checksum":"29fbf9b26e1510718e178fea04ff8f332b376fef0cbe687dc3e407a76c6c3cb1"},
 {"name":"globalid","version":"1.1.0","platform":"ruby","checksum":"b337e1746f0c8cb0a6c918234b03a1ddeb4966206ce288fbb57779f59b2d154f"},
 {"name":"gon","version":"6.4.0","platform":"ruby","checksum":"e3a618d659392890f1aa7db420f17c75fd7d35aeb5f8fe003697d02c4b88d2f0"},
 {"name":"google-apis-androidpublisher_v3","version":"0.34.0","platform":"ruby","checksum":"d7e1d7dd92f79c498fe2082222a1740d788e022e660c135564b3fd299cab5425"},

@@ -801,7 +801,7 @@ GEM
       omniauth (>= 1.3, < 3)
       pyu-ruby-sasl (>= 0.0.3.3, < 0.1)
       rubyntlm (~> 0.5)
-    gitlab_quality-test_tooling (2.4.0)
+    gitlab_quality-test_tooling (2.8.0)
       activesupport (>= 7.0, < 7.2)
       amatch (~> 0.4.1)
       fog-google (~> 1.24, >= 1.24.1)
@@ -2120,7 +2120,7 @@ DEPENDENCIES
   gitlab-utils!
   gitlab_chronic_duration (~> 0.12)
   gitlab_omniauth-ldap (~> 2.2.0)
-  gitlab_quality-test_tooling (~> 2.4.0)
+  gitlab_quality-test_tooling (~> 2.8.0)
   gon (~> 6.4.0)
   google-apis-androidpublisher_v3 (~> 0.34.0)
   google-apis-cloudbilling_v1 (~> 0.22.0)

@@ -243,7 +243,7 @@
 {"name":"gitlab-styles","version":"13.1.0","platform":"ruby","checksum":"46c7c5729616355868b7b40a4ffcd052b36346076042abe8cafaee1688cbf2c1"},
 {"name":"gitlab_chronic_duration","version":"0.12.0","platform":"ruby","checksum":"0d766944d415b5c831f176871ee8625783fc0c5bfbef2d79a3a616f207ffc16d"},
 {"name":"gitlab_omniauth-ldap","version":"2.2.0","platform":"ruby","checksum":"bb4d20acb3b123ed654a8f6a47d3fac673ece7ed0b6992edb92dca14bad2838c"},
-{"name":"gitlab_quality-test_tooling","version":"2.4.0","platform":"ruby","checksum":"2e4c12528b05253f1b9c9cef49f4f545d4aab94bf28ca337611c795b2cfb36a0"},
+{"name":"gitlab_quality-test_tooling","version":"2.8.0","platform":"ruby","checksum":"29fbf9b26e1510718e178fea04ff8f332b376fef0cbe687dc3e407a76c6c3cb1"},
 {"name":"globalid","version":"1.1.0","platform":"ruby","checksum":"b337e1746f0c8cb0a6c918234b03a1ddeb4966206ce288fbb57779f59b2d154f"},
 {"name":"gon","version":"6.4.0","platform":"ruby","checksum":"e3a618d659392890f1aa7db420f17c75fd7d35aeb5f8fe003697d02c4b88d2f0"},
 {"name":"google-apis-androidpublisher_v3","version":"0.34.0","platform":"ruby","checksum":"d7e1d7dd92f79c498fe2082222a1740d788e022e660c135564b3fd299cab5425"},

@@ -813,7 +813,7 @@ GEM
       omniauth (>= 1.3, < 3)
       pyu-ruby-sasl (>= 0.0.3.3, < 0.1)
       rubyntlm (~> 0.5)
-    gitlab_quality-test_tooling (2.4.0)
+    gitlab_quality-test_tooling (2.8.0)
       activesupport (>= 7.0, < 7.2)
       amatch (~> 0.4.1)
       fog-google (~> 1.24, >= 1.24.1)
@@ -2155,7 +2155,7 @@ DEPENDENCIES
   gitlab-utils!
   gitlab_chronic_duration (~> 0.12)
   gitlab_omniauth-ldap (~> 2.2.0)
-  gitlab_quality-test_tooling (~> 2.4.0)
+  gitlab_quality-test_tooling (~> 2.8.0)
   gon (~> 6.4.0)
   google-apis-androidpublisher_v3 (~> 0.34.0)
   google-apis-cloudbilling_v1 (~> 0.22.0)

@@ -56,7 +56,7 @@ export default {
   <file-row
     :file="file"
     v-bind="$attrs"
-    :class="{ 'is-active': isActive }"
+    :class="{ 'is-active': isActive, 'is-loading': file.loading }"
    class="diff-file-row"
    truncate-middle
    :file-classes="fileClasses"

@@ -21,6 +21,11 @@ export default {
       type: Boolean,
       required: true,
     },
+    loadedFiles: {
+      type: Object,
+      required: false,
+      default: null,
+    },
   },
   data() {
     const treeWidth =
@@ -65,6 +70,10 @@ export default {
       side="right"
       @resize-end="cacheTreeListWidth"
     />
-    <tree-list :hide-file-stats="hideFileStats" @clickFile="$emit('clickFile', $event)" />
+    <tree-list
+      :hide-file-stats="hideFileStats"
+      :loaded-files="loadedFiles"
+      @clickFile="$emit('clickFile', $event)"
+    />
   </div>
 </template>

@@ -36,6 +36,11 @@ export default {
       type: Boolean,
       required: true,
     },
+    loadedFiles: {
+      type: Object,
+      required: false,
+      default: null,
+    },
   },
   data() {
     return {
@@ -82,13 +87,17 @@ export default {
     flatFilteredTreeList() {
       const result = [];
       const createFlatten = (level, hidden) => (item) => {
+        const isTree = item.type === 'tree';
+        const loading =
+          !isTree && !item.isHeader && this.loadedFiles && !this.loadedFiles[item.fileHash];
         result.push({
           ...item,
           hidden,
           level: item.isHeader ? 0 : level,
           key: item.key || item.path,
+          loading,
         });
-        const isHidden = hidden || (item.type === 'tree' && !item.opened);
+        const isHidden = hidden || (isTree && !item.opened);
         item.tree.forEach(createFlatten(level + 1, isHidden));
       };

@@ -229,11 +238,11 @@ export default {
         :current-diff-file-id="currentDiffFileId"
         :style="{ '--level': item.level }"
         :class="{ 'tree-list-parent': item.level > 0 }"
-        :tabindex="0"
+        :tabindex="item.loading ? -1 : 0"
         class="gl-relative !gl-m-1"
         :data-file-row="item.fileHash"
         @toggleTreeOpen="toggleTreeOpen"
-        @clickFile="$emit('clickFile', $event)"
+        @clickFile="!item.loading && $emit('clickFile', $event)"
       />
     </template>
     <template #after>

@@ -1,5 +1,5 @@
 import Vue from 'vue';
-import App from './app.vue';
+import ImportFromFogBugzApp from './import_from_fogbugz_app.vue';

 export function initFogbugzImportProjectForm() {
   const el = document.getElementById('js-vue-import-fogbugz-project-app');
@@ -14,8 +14,9 @@ export function initFogbugzImportProjectForm() {

   return new Vue({
     el,
+    name: 'ImportFromFogBugzRoot',
     render(createElement) {
-      return createElement(App, { props });
+      return createElement(ImportFromFogBugzApp, { props });
     },
   });
 }

@@ -1,5 +1,5 @@
 import Vue from 'vue';
-import ImportFromGiteaRoot from './import_from_gitea_root.vue';
+import ImportFromGiteaApp from './import_from_gitea_app.vue';

 export function initGiteaImportProjectForm() {
   const el = document.getElementById('js-vue-import-gitea-project-root');
@@ -16,7 +16,7 @@ export function initGiteaImportProjectForm() {
     el,
     name: 'ImportFromGiteaRoot',
     render(h) {
-      return h(ImportFromGiteaRoot, { props });
+      return h(ImportFromGiteaApp, { props });
     },
   });
 }

@@ -9,12 +9,10 @@ import {
 import TitleArea from '~/vue_shared/components/registry/title_area.vue';
 import { helpPagePath } from '~/helpers/help_page_helper';
 import * as Sentry from '~/sentry/sentry_browser_wrapper';
-import { s__ } from '~/locale';
+import { n__, s__, sprintf } from '~/locale';
 import EmptyState from '../components/model_list_empty_state.vue';
-import * as i18n from '../translations';
 import { BASE_SORT_FIELDS, MLFLOW_USAGE_MODAL_ID } from '../constants';
 import getModelsQuery from '../graphql/queries/get_models.query.graphql';
-import { makeLoadModelErrorMessage } from '../translations';
 import SearchableTable from '../components/searchable_table.vue';
 import MlflowUsageModal from '../components/mlflow_usage_modal.vue';

@@ -110,13 +108,13 @@ export default {
     },
     createModelItem() {
       return {
-        text: this.$options.i18n.create_new_model,
+        text: s__('MlModelRegistry|Create new model'),
         href: this.createModelPath,
       };
     },
     mlflowUsageModalItem() {
       return {
-        text: this.$options.i18n.import_mlflow,
+        text: s__('MlModelRegistry|Import model using MLflow'),
       };
     },
   },
@@ -136,15 +134,18 @@ export default {
       this.$apollo.queries.models.fetchMore({});
     },
     handleError(error) {
-      this.errorMessage = makeLoadModelErrorMessage(error.message);
+      this.errorMessage = sprintf(
+        s__('MlModelRegistry|Failed to load models with error: %{message}'),
+        { message: error.message },
+      );
       Sentry.captureException(error);
     },
   },
   i18n: {
-    create_import_title: s__('MlModelRegistry|Create/Import'),
-    create_new_model: s__('MlModelRegistry|Create new model'),
-    import_mlflow: s__('MlModelRegistry|Import model using MLflow'),
-    ...i18n,
+    createImportTitle: s__('MlModelRegistry|Create/Import model'),
+    titleLabel: s__('MlModelRegistry|Model registry'),
+    modelsCountLabel: (modelCount) =>
+      n__('MlModelRegistry|%d model', 'MlModelRegistry|%d models', modelCount),
   },
   sortableFields: BASE_SORT_FIELDS,
   docHref: helpPagePath('user/project/ml/model_registry/_index.md'),
@@ -164,7 +165,7 @@ export default {
     <title-area>
       <template #title>
         <div class="gl-flex gl-grow gl-items-center">
-          <span>{{ $options.i18n.TITLE_LABEL }}</span>
+          <span>{{ $options.i18n.titleLabel }}</span>
         </div>
       </template>
       <template #metadata-models-count>
@@ -176,7 +177,7 @@ export default {
      <template #right-actions>
        <gl-disclosure-dropdown
          v-if="canWriteModelRegistry"
-          :toggle-text="$options.i18n.create_import_title"
+          :toggle-text="$options.i18n.createImportTitle"
          toggle-class="gl-w-full"
          data-testid="create-model-button"
          variant="confirm"

@@ -145,7 +145,7 @@ export default {
       return this.$apollo.queries.modelWithModelVersion.loading;
     },
     title() {
-      return `${this.modelName} / ${this.versionName}`;
+      return `${this.modelName} / ${this.$options.i18n.versionLabelText} ${this.versionName}`;
     },
     queryVariables() {
       return {
@@ -240,6 +240,7 @@ export default {
       performance: s__('MlModelRegistry|Performance'),
     },
     noneText: __('None'),
+    versionLabelText: s__('MlModelRegistry|version'),
   },
   ROUTE_DETAILS,
   ROUTE_ARTIFACTS,

@@ -2,8 +2,7 @@
 import { GlEmptyState, GlButton } from '@gitlab/ui';
 import emptySvgUrl from '@gitlab/svgs/dist/illustrations/status/status-new-md.svg';
 import * as Sentry from '~/sentry/sentry_browser_wrapper';
-import { s__, __ } from '~/locale';
-import { makeLoadCandidatesErrorMessage, NO_CANDIDATES_LABEL } from '../translations';
+import { s__, __, sprintf } from '~/locale';
 import getModelCandidatesQuery from '../graphql/queries/get_model_candidates.query.graphql';
 import { GRAPHQL_PAGE_SIZE, CANDIDATES_DOCS_PATH } from '../constants';
 import SearchableTable from './searchable_table.vue';
@@ -67,12 +66,16 @@ export default {
       };
     },
     handleError(error) {
-      this.errorMessage = makeLoadCandidatesErrorMessage(error.message);
+      this.errorMessage = sprintf(
+        s__('MlModelRegistry|Failed to load model runs with error: %{message}'),
+        {
+          message: error.message,
+        },
+      );
       Sentry.captureException(error);
     },
   },
   i18n: {
-    NO_CANDIDATES_LABEL,
    learnMore: __('Learn more'),
    emptyStateLabel: s__('MlModelRegistry|No runs associated with this model'),
    emptyStateDescription: s__(

@@ -1,9 +1,4 @@
-import { __, s__, n__, sprintf } from '~/locale';
-
-export const TITLE_LABEL = s__('MlModelRegistry|Model registry');
-
-export const modelsCountLabel = (modelCount) =>
-  n__('MlModelRegistry|%d model', 'MlModelRegistry|%d models', modelCount);
+import { __, s__, sprintf } from '~/locale';

 export const DESCRIPTION_LABEL = __('Description');
 export const NO_DESCRIPTION_PROVIDED_LABEL = s__('MlModelRegistry|No description provided');
@@ -41,15 +36,4 @@ export const makeLoadVersionsErrorMessage = (message) =>
     message,
   });

-export const makeLoadModelErrorMessage = (message) =>
-  sprintf(s__('MlModelRegistry|Failed to load model with error: %{message}'), {
-    message,
-  });
-
-export const NO_CANDIDATES_LABEL = s__('MlModelRegistry|This model has no runs');
-export const makeLoadCandidatesErrorMessage = (message) =>
-  sprintf(s__('MlModelRegistry|Failed to load model runs with error: %{message}'), {
-    message,
-  });
-
 export const CREATE_MODEL_LINK_TITLE = s__('MlModelRegistry|Create model');

@@ -38,6 +38,7 @@ const ICON_COLORS = {
   issues: 'system-note-icon-success',
   error: 'system-note-icon-danger',
   'review-warning': 'system-note-icon-warning',
+  'comment-lines': 'system-note-icon-info',
 };

 export default {

@@ -359,10 +359,18 @@
   }
 }

+.diff-file-row {
+  transition: color 0.2s ease-in-out;
+}
+
 .diff-file-row.is-active {
   background-color: var(--gray-50, $gray-50);
 }

+.diff-file-row.is-loading {
+  color: var(--gl-text-color-disabled);
+}
+
 .mr-info-list {
   clear: left;
   position: relative;

@@ -34,9 +34,10 @@ module WikiActions

   before_action do
     push_frontend_feature_flag(:preserve_markdown, container)
-    push_force_frontend_feature_flag(:glql_integration, container&.glql_integration_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:glql_load_on_click, container&.glql_load_on_click_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:continue_indented_text, container&.continue_indented_text_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:glql_integration, !!container&.glql_integration_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:glql_load_on_click, !!container&.glql_load_on_click_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:continue_indented_text,
+      !!container&.continue_indented_text_feature_flag_enabled?)
   end

   before_action only: [:show, :edit, :update] do
@@ -8,7 +8,7 @@ class Groups::BoardsController < Groups::ApplicationController
   before_action do
     push_frontend_feature_flag(:board_multi_select, group)
     push_frontend_feature_flag(:issues_list_drawer, group)
-    push_force_frontend_feature_flag(:work_items_beta, group&.work_items_beta_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_beta, !!group&.work_items_beta_feature_flag_enabled?)
   end

   feature_category :team_planning

@@ -6,15 +6,15 @@ module Groups

     before_action do
       push_frontend_feature_flag(:notifications_todos_buttons)
-      push_force_frontend_feature_flag(:work_items, group&.work_items_feature_flag_enabled?)
-      push_force_frontend_feature_flag(:work_items_beta, group&.work_items_beta_feature_flag_enabled?)
-      push_force_frontend_feature_flag(:work_items_alpha, group&.work_items_alpha_feature_flag_enabled?)
+      push_force_frontend_feature_flag(:work_items, !!group&.work_items_feature_flag_enabled?)
+      push_force_frontend_feature_flag(:work_items_beta, !!group&.work_items_beta_feature_flag_enabled?)
+      push_force_frontend_feature_flag(:work_items_alpha, !!group&.work_items_alpha_feature_flag_enabled?)
       push_force_frontend_feature_flag(:namespace_level_work_items, namespace_work_items_enabled?)
       push_force_frontend_feature_flag(:create_group_level_work_items,
-        group&.create_group_level_work_items_feature_flag_enabled?)
-      push_force_frontend_feature_flag(:glql_integration, group&.glql_integration_feature_flag_enabled?)
-      push_force_frontend_feature_flag(:glql_load_on_click, group&.glql_load_on_click_feature_flag_enabled?)
-      push_force_frontend_feature_flag(:continue_indented_text, group&.continue_indented_text_feature_flag_enabled?)
+        !!group&.create_group_level_work_items_feature_flag_enabled?)
+      push_force_frontend_feature_flag(:glql_integration, !!group&.glql_integration_feature_flag_enabled?)
+      push_force_frontend_feature_flag(:glql_load_on_click, !!group&.glql_load_on_click_feature_flag_enabled?)
+      push_force_frontend_feature_flag(:continue_indented_text, !!group&.continue_indented_text_feature_flag_enabled?)
       push_frontend_feature_flag(:issues_list_drawer, group)
     end
     before_action :handle_new_work_item_path, only: [:show]
@@ -33,7 +33,7 @@ module Groups
     private

     def namespace_work_items_enabled?
-      group&.namespace_work_items_enabled?
+      !!group&.namespace_work_items_enabled?
     end

     # The work_items/:iid route renders a Vue app that takes care of the show and new pages.

@@ -8,7 +8,7 @@ class Projects::BoardsController < Projects::ApplicationController
   before_action do
     push_frontend_feature_flag(:board_multi_select, project)
     push_frontend_feature_flag(:issues_list_drawer, project)
-    push_force_frontend_feature_flag(:work_items_beta, project&.work_items_beta_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_beta, !!project&.work_items_beta_feature_flag_enabled?)
   end

   feature_category :team_planning

@@ -7,9 +7,9 @@ class Projects::IncidentsController < Projects::ApplicationController
   before_action :authorize_read_issue!
   before_action :load_incident, only: [:show]
   before_action do
-    push_force_frontend_feature_flag(:work_items, @project&.work_items_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:work_items_beta, @project&.work_items_beta_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:work_items_alpha, @project&.work_items_alpha_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items, !!@project&.work_items_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_beta, !!@project&.work_items_beta_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_alpha, !!@project&.work_items_alpha_feature_flag_enabled?)
     push_frontend_feature_flag(:notifications_todos_buttons, current_user)
   end

@@ -49,11 +49,11 @@ class Projects::IssuesController < Projects::ApplicationController
     push_frontend_feature_flag(:service_desk_ticket)
     push_frontend_feature_flag(:issues_list_drawer, project)
     push_frontend_feature_flag(:notifications_todos_buttons, current_user)
-    push_force_frontend_feature_flag(:glql_integration, project&.glql_integration_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:glql_load_on_click, project&.glql_load_on_click_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:continue_indented_text, project&.continue_indented_text_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:work_items_beta, project&.work_items_beta_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:work_items_alpha, project&.work_items_alpha_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:glql_integration, !!project&.glql_integration_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:glql_load_on_click, !!project&.glql_load_on_click_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:continue_indented_text, !!project&.continue_indented_text_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_beta, !!project&.work_items_beta_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_alpha, !!project&.work_items_alpha_feature_flag_enabled?)
   end

   before_action only: [:index, :show] do

@@ -8,9 +8,9 @@ class Projects::MergeRequests::ApplicationController < Projects::ApplicationCont
   feature_category :code_review_workflow

   before_action do
-    push_force_frontend_feature_flag(:glql_integration, project&.glql_integration_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:glql_load_on_click, project&.glql_load_on_click_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:continue_indented_text, project&.continue_indented_text_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:glql_integration, !!project&.glql_integration_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:glql_load_on_click, !!project&.glql_load_on_click_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:continue_indented_text, !!project&.continue_indented_text_feature_flag_enabled?)
   end

   private

@@ -9,12 +9,12 @@ class Projects::WorkItemsController < Projects::ApplicationController
   before_action :authorize_import_access!, only: [:import_csv, :authorize] # rubocop:disable Rails/LexicallyScopedActionFilter
   before_action do
     push_frontend_feature_flag(:notifications_todos_buttons)
-    push_force_frontend_feature_flag(:work_items, project&.work_items_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:work_items_beta, project&.work_items_beta_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:work_items_alpha, project&.work_items_alpha_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:glql_integration, project&.glql_integration_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:glql_load_on_click, project&.glql_load_on_click_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:continue_indented_text, project&.continue_indented_text_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items, !!project&.work_items_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_beta, !!project&.work_items_beta_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_alpha, !!project&.work_items_alpha_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:glql_integration, !!project&.glql_integration_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:glql_load_on_click, !!project&.glql_load_on_click_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:continue_indented_text, !!project&.continue_indented_text_feature_flag_enabled?)
     push_frontend_feature_flag(:namespace_level_work_items, project&.group)
   end

@@ -51,9 +51,9 @@ class ProjectsController < Projects::ApplicationController
       push_licensed_feature(:security_orchestration_policies)
     end

-    push_force_frontend_feature_flag(:work_items, @project&.work_items_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:work_items_beta, @project&.work_items_beta_feature_flag_enabled?)
-    push_force_frontend_feature_flag(:work_items_alpha, @project&.work_items_alpha_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items, !!@project&.work_items_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_beta, !!@project&.work_items_beta_feature_flag_enabled?)
+    push_force_frontend_feature_flag(:work_items_alpha, !!@project&.work_items_alpha_feature_flag_enabled?)
     # FF to enable setting to allow webhook execution on 30D and 60D notification delivery too
     push_frontend_feature_flag(:extended_expiry_webhook_execution_setting, @project&.namespace)
   end

@@ -49,7 +49,8 @@ module SystemNoteHelper
     'relate_to_parent' => 'link',
     'unrelate_from_parent' => 'link',
     'requested_changes' => 'error',
-    'override' => 'review-warning'
+    'override' => 'review-warning',
+    'reviewed' => 'comment-lines'
   }.freeze

   def system_note_icon_name(note)

@@ -126,4 +126,12 @@ class BulkImport < ApplicationRecord
   def source_url
     configuration&.url
   end
+
+  def source_equals_destination?
+    return false unless configuration
+
+    source_uri = URI.parse(configuration.url.to_s)
+
+    source_uri.host == Settings.gitlab.host
+  end
 end

@@ -268,6 +268,8 @@ class BulkImports::Entity < ApplicationRecord
   end

   def validate_destination_namespace_ascendency
+    return unless bulk_import&.source_equals_destination?
+
     source = Group.find_by_full_path(source_full_path)

     return unless source

@@ -1348,6 +1348,10 @@ module Ci
       Enums::Ci::Pipeline.dangling_sources.key?(source.to_sym)
     end

+    def disable_all_except_yaml_variables?
+      Enums::Ci::Pipeline.workload_sources.key?(source.to_sym)
+    end
+
     def source_ref_path
       if branch? || merge_request?
         Gitlab::Git::BRANCH_REF_PREFIX + source_ref.to_s

@@ -79,6 +79,14 @@ module Enums
         )
       end

+      # Workloads are always dangling, but they also have almost all sources of CI variables disabled by default,
+      # as they do not need access to most kinds of CI variables.
+      def self.workload_sources
+        dangling_sources.slice(
+          :duo_workflow
+        )
+      end
+
       # CI sources are those pipeline events that affect the CI status of the ref
       # they run for. By definition it excludes dangling pipelines.
       def self.ci_sources

@@ -29,7 +29,7 @@ class SystemNoteMetadata < ApplicationRecord
     tag due_date start_date_or_due_date pinned_embed cherry_pick health_status approved unapproved
     status alert_issue_added relate unrelate new_alert_added severity contact timeline_event
     issue_type relate_to_child unrelate_from_child relate_to_parent unrelate_from_parent override
-    issue_email_participants requested_changes
+    issue_email_participants requested_changes reviewed
   ].freeze

   validates :note, presence: true, unless: :importing?

@@ -51,7 +51,7 @@ module BulkImports
         self.class.name,
         'create',
         label: 'bulk_import_group',
-        extra: { source_equals_destination: source_equals_destination? }
+        extra: { source_equals_destination: bulk_import.source_equals_destination? }
       )

       if Feature.enabled?(:importer_user_mapping, current_user) &&
@@ -161,10 +161,6 @@ module BulkImports
       )
     end

-    def source_equals_destination?
-      credentials[:url].starts_with?(Settings.gitlab.base_url)
-    end
-
     def validate_destination_namespace(entity_params)
       destination_namespace = entity_params[:destination_namespace]
       source_type = entity_params[:source_type]

@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module Ci
+  module Workloads
+    # The concept of `Workload` is an abstraction around running arbitrary compute on our `CI::Runners` infrastructure.
+    # Right now this class is simply a wrapper around constructing a `Ci::Pipeline` but we've identified a need in many
+    # parts of the GitLab application to create these workloads. Also see:
+    # 1. https://gitlab.com/gitlab-org/gitlab/-/issues/328489
+    # 2. https://gitlab.com/gitlab-com/content-sites/handbook/-/merge_requests/10811
+    #
+    # In the future it's likely that this class will persist additional models and the concept of a `Workload` may
+    # become first class. For that reason we abstract users from the underlying `Ci::Pipeline` semantics.
+    class RunWorkloadService
+      def initialize(project:, current_user:, source:, workload:, create_branch: false)
+        @project = project
+        @current_user = current_user
+        @source = source
+        @workload = workload
+        @create_branch = create_branch
+      end
+
+      def execute
+        validate_source!
+        ref = @create_branch ? create_repository_branch : default_branch
+
+        service = ::Ci::CreatePipelineService.new(@project, @current_user, ref: ref)
+        service.execute(
+          @source,
+          ignore_skip_ci: true,
+          save_on_errors: false,
+          content: content
+        )
+      end
+
+      private
+
+      def create_repository_branch
+        branch_name = "workloads/#{SecureRandom.hex[0..10]}"
+        raise "Branch already exists" if @project.repository.branch_exists?(branch_name)
+
+        repo_branch = @project.repository.add_branch(@current_user, branch_name, default_branch)
+        raise "Error in git branch creation" unless repo_branch
+
+        branch_name
+      end
+
+      def content
+        { workload: @workload.job }.deep_stringify_keys.to_yaml
+      end
+
+      def default_branch
+        @project.default_branch_or_main
+      end
+
+      def validate_source!
+        return if ::Enums::Ci::Pipeline.workload_sources.include?(@source)
+
+        raise ArgumentError, "unsupported source `#{@source}` for workloads"
+      end
+    end
+  end
+end

@@ -61,10 +61,22 @@ module Issues
       handle_escalation_status_change(issue)
       create_timeline_event(issue)
       try_to_associate_contacts(issue)
+      publish_event(issue)

       super
     end

+    def publish_event(issue)
+      event = ::WorkItems::WorkItemCreatedEvent.new(data: {
+        id: issue.id,
+        namespace_id: issue.namespace_id
+      })
+
+      issue.run_after_commit_or_now do
+        ::Gitlab::EventStore.publish(event)
+      end
+    end
+
     def handle_changes(issue, options)
       super
       old_associations = options.fetch(:old_associations, {})

@@ -110,10 +110,25 @@ module Issues
       attr_reader :perform_spam_check

       override :after_update
-      def after_update(issue, _old_associations)
+      def after_update(issue, old_associations)
         super

         GraphqlTriggers.work_item_updated(issue)
+        publish_event(issue, old_associations)
       end

+      def publish_event(work_item, old_associations)
+        event = WorkItems::WorkItemUpdatedEvent.new(data: {
+          id: work_item.id,
+          namespace_id: work_item.namespace_id,
+          previous_work_item_parent_id: old_associations[:work_item_parent_id],
+          updated_attributes: work_item.previous_changes&.keys&.map(&:to_s),
+          updated_widgets: @widget_params&.keys&.map(&:to_s)
+        }.tap(&:compact_blank!))
+
+        work_item.run_after_commit_or_now do
+          Gitlab::EventStore.publish(event)
+        end
+      end
+
       def handle_date_changes(issue)

@@ -9,6 +9,7 @@ module MergeRequests

       create_requested_changes(merge_request) if state == 'requested_changes'
       destroy_requested_changes(merge_request) if state == 'approved'
+      create_reviewed_system_note(merge_request) if state == 'reviewed'

       if reviewer
         return error("Reviewer has approved") if reviewer.approved? && %w[requested_changes unapproved].exclude?(state)
@@ -47,5 +48,17 @@ module MergeRequests
     def destroy_requested_changes(merge_request)
       merge_request.destroy_requested_changes(current_user)
     end
+
+    def create_reviewed_system_note(merge_request)
+      return unless can_leave_reviewed_system_note
+
+      SystemNoteService.reviewed(merge_request, current_user)
+    end
+
+    def can_leave_reviewed_system_note
+      return true unless current_user.respond_to?(:user_type)
+
+      current_user.user_type != 'duo_code_review_bot'
+    end
   end
 end

@@ -387,6 +387,10 @@ module SystemNoteService
     merge_requests_service(noteable, noteable.project, user).requested_changes
   end

+  def reviewed(noteable, user)
+    merge_requests_service(noteable, noteable.project, user).reviewed
+  end
+
   def change_alert_status(alert, author, reason = nil)
     ::SystemNotes::AlertManagementService.new(noteable: alert, container: alert.project, author: author).change_alert_status(reason)
   end

@@ -210,6 +210,12 @@ module SystemNotes
       create_note(NoteSummary.new(noteable, project, author, body, action: 'requested_changes'))
     end

+    def reviewed
+      body = "left review comments without approving or requesting changes"
+
+      create_note(NoteSummary.new(noteable, project, author, body, action: 'reviewed'))
+    end
+
     private

     def format_reason(reason)

@@ -22,7 +22,6 @@ module WorkItems
       work_item = result[:issue]

       if work_item.valid?
-        publish_event(work_item)
         success(payload(work_item))
       else
         error(work_item.errors.full_messages, :unprocessable_entity, pass_back: payload(work_item))
@@ -48,17 +47,6 @@ module WorkItems
       def skip_system_notes?
         false
       end
-
-      def publish_event(work_item)
-        work_item.run_after_commit_or_now do
-          Gitlab::EventStore.publish(
-            WorkItems::WorkItemCreatedEvent.new(data: {
-              id: work_item.id,
-              namespace_id: work_item.namespace_id
-            })
-          )
-        end
-      end
     end
   end
 end

@@ -49,7 +49,6 @@ module WorkItems
         super

         GraphqlTriggers.issuable_title_updated(work_item) if work_item.previous_changes.key?(:title)
-        publish_event(work_item, old_associations)
       end

       def payload(work_item)
@@ -64,20 +63,6 @@ module WorkItems
         )
       end

-      def publish_event(work_item, old_associations)
-        event = WorkItems::WorkItemUpdatedEvent.new(data: {
-          id: work_item.id,
-          namespace_id: work_item.namespace_id,
-          previous_work_item_parent_id: old_associations[:work_item_parent_id],
-          updated_attributes: work_item.previous_changes&.keys&.map(&:to_s),
-          updated_widgets: @widget_params&.keys&.map(&:to_s)
-        }.tap(&:compact_blank!))
-
-        work_item.run_after_commit_or_now do
-          Gitlab::EventStore.publish(event)
-        end
-      end
-
       def parent
         container
       end

@@ -0,0 +1,9 @@
+---
+name: autoflow_issue_events_enabled
+feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/443486
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/161804
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/516169
+milestone: '17.9'
+group: group::environments
+type: wip
+default_enabled: false

@@ -204,7 +204,7 @@ ci_variables:
 clusters_managed_resources:
   - table: p_ci_builds
     column: build_id
-    on_delete: async_delete
+    on_delete: async_delete
   - table: ci_builds
     column: build_id
     on_delete: async_delete
@@ -536,10 +536,6 @@ requirements_management_test_reports:
   - table: p_ci_builds
     column: build_id
     on_delete: async_nullify
-routes:
-  - table: namespaces
-    column: namespace_id
-    on_delete: async_delete
 sbom_component_versions:
   - table: organizations
     column: organization_id

@@ -1117,6 +1117,7 @@ Gitlab.ee do

   Settings.duo_workflow.reverse_merge!(
     secure: true,
+    debug: false,
     executor_binary_url: "https://gitlab.com/api/v4/projects/58711783/packages/generic/duo-workflow-executor/#{executor_version}/duo-workflow-executor.tar.gz",
     executor_binary_urls: executor_binary_urls,
     executor_version: executor_version

@@ -225,6 +225,14 @@
   - 1
 - - cluster_agent
   - 1
+- - clusters_agents_auto_flow_work_items_closed_event
+  - 1
+- - clusters_agents_auto_flow_work_items_created_event
+  - 1
+- - clusters_agents_auto_flow_work_items_reopened_event
+  - 1
+- - clusters_agents_auto_flow_work_items_updated_event
+  - 1
 - - compliance_management_chain_of_custody_report
   - 1
 - - compliance_management_framework_export_mailer

@@ -17,5 +17,4 @@ desired_sharding_key:
       table: packages_debian_group_components
       sharding_key: group_id
       belongs_to: component
-    awaiting_backfill_on_parent: true
 table_size: small

@@ -8,14 +8,6 @@ description: Debian package group-level distribution components
 introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/51732
 milestone: '13.9'
 gitlab_schema: gitlab_main_cell
-desired_sharding_key:
-  group_id:
-    references: namespaces
-    backfill_via:
-      parent:
-        foreign_key: distribution_id
-        table: packages_debian_group_distributions
-        sharding_key: group_id
-        belongs_to: distribution
-desired_sharding_key_migration_job_name: BackfillPackagesDebianGroupComponentsGroupId
 table_size: small
+sharding_key:
+  group_id: namespaces

@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+class AddForeignKeyRoutesNamespaceId < Gitlab::Database::Migration[2.2]
+  disable_ddl_transaction!
+  milestone '17.10'
+
+  SOURCE_TABLE = :routes
+  TARGET_TABLE = :namespaces
+  COLUMN = :namespace_id
+
+  def up
+    add_concurrent_foreign_key(
+      SOURCE_TABLE,
+      TARGET_TABLE,
+      column: COLUMN,
+      validate: false,
+      on_delete: :cascade
+    )
+  end
+
+  def down
+    with_lock_retries do
+      remove_foreign_key_if_exists(SOURCE_TABLE, TARGET_TABLE, column: COLUMN)
+    end
+  end
+end

@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+class AddPackagesDebianGroupComponentsGroupIdNotNullConstraint < Gitlab::Database::Migration[2.2]
+  disable_ddl_transaction!
+  milestone '17.10'
+
+  def up
+    add_not_null_constraint :packages_debian_group_components, :group_id
+  end
+
+  def down
+    remove_not_null_constraint :packages_debian_group_components, :group_id
+  end
+end

@@ -0,0 +1 @@
+477694af5fbc961b421c1ad53b09b46635eb46d2420b61afadb0eb468cb4e8e9

@@ -0,0 +1 @@
+749dd77d2adcdb7081a8c82fa1c190295ba182f2a8dc6d92055aa122414d97b0

@@ -17828,7 +17828,8 @@ CREATE TABLE packages_debian_group_components (
     distribution_id bigint NOT NULL,
     name text NOT NULL,
     group_id bigint,
-    CONSTRAINT check_a9bc7d85be CHECK ((char_length(name) <= 255))
+    CONSTRAINT check_a9bc7d85be CHECK ((char_length(name) <= 255)),
+    CONSTRAINT check_bb77e71a15 CHECK ((group_id IS NOT NULL))
 );

 CREATE SEQUENCE packages_debian_group_components_id_seq
@@ -39383,6 +39384,9 @@ ALTER TABLE ONLY import_placeholder_memberships
 ALTER TABLE p_ci_builds
     ADD CONSTRAINT fk_6661f4f0e8 FOREIGN KEY (resource_group_id) REFERENCES ci_resource_groups(id) ON DELETE SET NULL;

+ALTER TABLE ONLY routes
+    ADD CONSTRAINT fk_679ff8213d FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE NOT VALID;
+
 ALTER TABLE ONLY ai_conversation_messages
     ADD CONSTRAINT fk_68774ec148 FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;

@@ -677,7 +677,7 @@ To increase the max attachment size to 200 MB in a
 If you receive intermittent push errors in your [Rails exceptions log](../logs/_index.md#exceptions_jsonlog), like this:

 ```plaintext
-Your push has been rejected, because this repository has exceeded its size limit.
+Your push to this repository cannot be completed because this repository has exceeded the allocated storage for your project.
 ```

 [Housekeeping](../housekeeping.md) tasks may be causing your repository size to grow.

File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -49,7 +49,7 @@ Using the `:latest` tag is **not recommended** as it can cause incompatibility i
   -e AIGW_GITLAB_URL=<your_gitlab_instance> \
   -e AIGW_GITLAB_API_URL=https://<your_gitlab_domain>/api/v4/ \
   registry.gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist/model-gateway:<ai-gateway-tag> \
-```
+```

 Replace `<ai-gateway-tag>` with the version that matches your GitLab instance. For example, if your GitLab version is `v17.9.0`, use `self-hosted-v17.9.0-ee`.
 From the container host, accessing `http://localhost:5052/docs` should open the AI gateway API documentation.
@@ -281,3 +281,37 @@ The AI gateway is available in multiple regions globally to ensure optimal perfo
 - Data sovereignty requirements compliance.

 You should locate your AI gateway in the same geographic region as your GitLab instance to help provide a frictionless developer experience, particularly for latency-sensitive features like Code Suggestions.
+
+## Troubleshooting
+
+### OpenShift Permission Issues
+
+When deploying the AI gateway on OpenShift, you might encounter permission errors due to OpenShift's security model.
+
+By default, the AI Gateway uses `/home/aigateway/.hf` for caching HuggingFace models, which may not be writable in OpenShift's
+security-restricted environment. This can result in permission errors like:
+
+```shell
+[Errno 13] Permission denied: '/home/aigateway/.hf/...'
+```
+
+To resolve this, set the `HF_HOME` environment variable to a writable location. You can use `/var/tmp/huggingface` or any other directory that is writable by the container.
+
+You can configure this in either of the following ways:
+
+- Add to your `values.yaml`:
+
+  ```yaml
+  extraEnvironmentVariables:
+    - name: HF_HOME
+      value: /var/tmp/huggingface # Use any writable directory
+  ```
+
+- Or include in your Helm upgrade command:
+
+  ```shell
+  --set "extraEnvironmentVariables[0].name=HF_HOME" \
+  --set "extraEnvironmentVariables[0].value=/var/tmp/huggingface" # Use any writable directory
+  ```
+
+This configuration ensures the AI Gateway can properly cache HuggingFace models while respecting OpenShift's security constraints. The exact directory you choose may depend on your specific OpenShift configuration and security policies.

@@ -29,7 +29,7 @@ You can use either of these words:
 - `closed`
 - `closing`

-See also the [Asana integration API documentation](../../../api/integrations.md#asana).
+See also the [Asana integration API documentation](../../../api/project_integrations.md#asana).

 ## Setup

@@ -35,7 +35,7 @@ Read more about the
 [Source Commits endpoint](https://www.pivotaltracker.com/help/api/rest/v5#Source_Commits) in
 the Pivotal Tracker API documentation.

-See also the [Pivotal Tracker integration API documentation](../../../api/integrations.md#pivotal-tracker).
+See also the [Pivotal Tracker integration API documentation](../../../api/project_integrations.md#pivotal-tracker).

 ## Set up Pivotal Tracker

@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module ActiveContext
+  module CollectionCache
+    class << self
+      TTL = 1.minute
+
+      def collections
+        refresh_cache if cache_expired?
+
+        @collections ||= {}
+      end
+
+      def fetch(value)
+        by_id(value) || by_name(value)
+      end
+
+      def by_id(id)
+        collections[id]
+      end
+
+      def by_name(name)
+        collections.values.find { |collection| collection.name == name.to_s }
+      end
+
+      private
+
+      def cache_expired?
+        return true unless @last_refreshed_at
+
+        Time.current - @last_refreshed_at > TTL
+      end
+
+      def refresh_cache
+        new_collections = {}
+
+        Config.collection_model.find_each do |record|
+          new_collections[record.id] = record
+        end
+
+        @collections = new_collections
+        @last_refreshed_at = Time.current
+      end
+    end
+  end
+end

@@ -10,6 +10,10 @@ module ActiveContext
         ActiveContext::Tracker.track!(objects, collection: self)
       end

+      def collection_name
+        raise NotImplementedError
+      end
+
       def queue
         raise NotImplementedError
       end
@@ -27,6 +31,10 @@ module ActiveContext
       def reference_klass
         nil
       end
+
+      def collection_record
+        ActiveContext::CollectionCache.fetch(collection_name)
+      end
     end

     attr_reader :object
@@ -38,9 +46,10 @@ module ActiveContext
     def references
       reference_klasses = Array.wrap(self.class.reference_klasses)
       routing = self.class.routing(object)
+      collection_id = self.class.collection_record.id

       reference_klasses.map do |reference_klass|
-        reference_klass.serialize(object, routing)
+        reference_klass.serialize(collection_id, routing, object)
       end
     end
   end

@@ -31,7 +31,7 @@ module ActiveContext
     end

     def push(references)
-      refs_by_shard = references.group_by { |ref| ActiveContext::Shard.shard_number(number_of_shards, ref) }
+      refs_by_shard = references.group_by { |ref| ActiveContext::Hash.consistent_hash(number_of_shards, ref) }

       ActiveContext::Redis.with_redis do |redis|
         refs_by_shard.each do |shard_number, shard_items|

@@ -2,7 +2,15 @@

 module ActiveContext
   class Config
-    Cfg = Struct.new(:enabled, :databases, :logger, :indexing_enabled, :re_enqueue_indexing_workers, :migrations_path)
+    Cfg = Struct.new(
+      :enabled,
+      :databases,
+      :logger,
+      :indexing_enabled,
+      :re_enqueue_indexing_workers,
+      :migrations_path,
+      :collection_model
+    )

     class << self
       def configure(&block)
@@ -25,6 +33,10 @@ module ActiveContext
         current.migrations_path || Rails.root.join('ee/db/active_context/migrate')
       end

+      def collection_model
+        current.collection_model || ::Ai::ActiveContext::Collection
+      end
+
       def logger
         current.logger || ::Logger.new($stdout)
       end

@@ -20,10 +20,18 @@ module ActiveContext
         number_of_partitions: number_of_partitions,
         fields: builder.fields
       )
+
+      create_collection_record(full_name, number_of_partitions)
     end

     private

+    def create_collection_record(name, number_of_partitions)
+      collection = Config.collection_model.find_or_initialize_by(name: name)
+      collection.update(number_of_partitions: number_of_partitions)
+      collection.save!
+    end
+
     def do_create_collection(...)
       raise NotImplementedError
     end

@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module ActiveContext
+  class Hash
+    def self.consistent_hash(number, data)
+      data = data.to_s unless data.is_a?(String)
+      Digest::SHA256.hexdigest(data).hex % number # rubocop: disable Fips/OpenSSL -- used for data distribution, not for security
+    end
+  end
+end

@@ -9,20 +9,18 @@ module ActiveContext

     class << self
       def deserialize(string)
-        ref_klass = ref_klass(string)
-
-        if ref_klass
-          ref_klass.instantiate(string)
-        else
-          Search::Elastic::Reference.deserialize(string)
-        end
+        ref_klass(string)&.instantiate(string)
       end

       def instantiate(string)
         new(*deserialize_string(string))
       end

-      def serialize
+      def serialize(collection_id, routing, data)
+        new(collection_id, routing, *serialize_data(data)).serialize
+      end
+
+      def serialize_data
         raise NotImplementedError
       end

@@ -35,11 +33,29 @@ module ActiveContext
       end
     end

+    attr_reader :collection_id, :collection, :routing, :serialized_args
+
+    def initialize(collection_id, routing, *serialized_args)
+      @collection_id = collection_id.to_i
+      @collection = ActiveContext::CollectionCache.fetch(@collection_id)
+      @routing = routing
+      @serialized_args = serialized_args
+      init
+    end
+
     def klass
       self.class.klass
     end

+    def serialize
+      self.class.join_delimited([collection_id, routing, serialize_arguments].flatten.compact)
+    end
+
     def init
       raise NotImplementedError
     end

+    def serialize_arguments
+      raise NotImplementedError
+    end
+
@@ -51,16 +67,16 @@ module ActiveContext
       raise NotImplementedError
     end

-    def partition_name
-      raise NotImplementedError
-    end
-
     def identifier
       raise NotImplementedError
     end

-    def routing
-      nil
+    def partition_name
+      collection.name
     end
+
+    def partition_number
+      collection.partition_for(routing)
+    end
   end
 end

@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-module ActiveContext
-  class Shard
-    def self.shard_number(number_of_shards, data)
-      Digest::SHA256.hexdigest(data).hex % number_of_shards # rubocop: disable Fips/OpenSSL -- used for data distribution, not for security
-    end
-  end
-end

@@ -36,7 +36,7 @@ RSpec.describe ActiveContext::Concerns::Queue do
     it 'pushes references to Redis' do
       references = %w[ref1 ref2 ref3]

-      allow(ActiveContext::Shard).to receive(:shard_number).and_return(0, 1, 0)
+      allow(ActiveContext::Hash).to receive(:consistent_hash).and_return(0, 1, 0)
       expect(redis_double).to receive(:incrby).with('mockmodule:{test_queue}:0:score', 2).and_return(2)
       expect(redis_double).to receive(:incrby).with('mockmodule:{test_queue}:1:score', 1).and_return(1)
       expect(redis_double).to receive(:zadd).with('mockmodule:{test_queue}:0:zset', [[1, 'ref1'], [2, 'ref3']])

@@ -72,6 +72,32 @@ RSpec.describe ActiveContext::Config do
     end
   end

+  describe '.collection_model' do
+    before do
+      stub_const('Ai::ActiveContext::Collection', Class.new)
+    end
+
+    context 'when collection_model is not set' do
+      it 'returns the default model' do
+        expect(described_class.collection_model).to eq(::Ai::ActiveContext::Collection)
+      end
+    end
+
+    context 'when collection_model is set' do
+      let(:custom_model) { Class.new }
+
+      before do
+        described_class.configure do |config|
+          config.collection_model = custom_model
+        end
+      end
+
+      it 'returns the configured collection model' do
+        expect(described_class.collection_model).to eq(custom_model)
+      end
+    end
+  end
+
   describe '.logger' do
     context 'when logger is not set' do
       it 'returns a default stdout logger' do

@@ -23,9 +23,8 @@ RSpec.describe ActiveContext::Reference do
       stub_const('Search::Elastic::Reference', Class.new)
     end

-    it 'falls back to Search::Elastic::Reference.deserialize' do
-      expect(Search::Elastic::Reference).to receive(:deserialize).with('test|string')
-      described_class.deserialize('test|string')
+    it 'returns nil' do
+      expect(described_class.deserialize('test|string')).to be_nil
     end
   end
 end
@@ -45,12 +44,6 @@ RSpec.describe ActiveContext::Reference do
     end
   end

-  describe '#klass' do
-    it 'returns the demodulized class name' do
-      expect(described_class.new.klass).to eq('Reference')
-    end
-  end
-
   describe 'ReferenceUtils methods' do
     describe '.delimit' do
       it 'splits the string by the delimiter' do

@@ -30,18 +30,6 @@ RSpec.describe ActiveContext::Tracker do
       expect(mock_queue).to contain_exactly(['test_string'])
     end

-    it 'serializes ActiveContext::Reference objects' do
-      reference_class = Class.new(ActiveContext::Reference) do
-        def serialize
-          'serialized_reference'
-        end
-      end
-      reference = reference_class.new
-
-      expect(described_class.track!(reference, collection: mock_collection)).to eq(1)
-      expect(mock_queue).to contain_exactly(['serialized_reference'])
-    end
-
     it 'uses collection.references for other objects' do
       obj = double('SomeObject')
       collection_instance = instance_double('CollectionInstance')


@@ -0,0 +1,70 @@
# frozen_string_literal: true

module Ai
module DuoWorkflows
class Workload < ::Ci::Workloads::Workload
MAX_RUNTIME = 2.hours
IMAGE = 'registry.gitlab.com/gitlab-org/duo-workflow/default-docker-image/workflow-generic-image:v0.0.4'

def initialize(current_user, params)
@params = params
@current_user = current_user
end

def job
{
image: IMAGE,
script: commands,
timeout: "#{MAX_RUNTIME} seconds",
variables: variables_without_expand,
artifacts: {
paths: artifacts_path
}
}
end

def artifacts_path
['workflow.diff']
end

def variables_without_expand
# We set expand: false so that there is no way for user inputs (e.g. the goal) to expand out other variables
variables.transform_values do |v|
{ value: v, expand: false }
end
end

def variables
{
DUO_WORKFLOW_BASE_PATH: './',
DUO_WORKFLOW_GOAL: @params[:goal],
DUO_WORKFLOW_WORKFLOW_ID: String(@params[:workflow_id]),
GITLAB_OAUTH_TOKEN: @params[:workflow_oauth_token],
DUO_WORKFLOW_SERVICE_SERVER: Gitlab::DuoWorkflow::Client.url,
DUO_WORKFLOW_SERVICE_TOKEN: @params[:workflow_service_token],
DUO_WORKFLOW_SERVICE_REALM: ::CloudConnector.gitlab_realm,
DUO_WORKFLOW_GLOBAL_USER_ID: Gitlab::GlobalAnonymousId.user_id(@current_user),
DUO_WORKFLOW_INSTANCE_ID: Gitlab::GlobalAnonymousId.instance_id,
DUO_WORKFLOW_INSECURE: Gitlab::DuoWorkflow::Client.secure? ? 'false' : 'true',
DUO_WORKFLOW_DEBUG: Gitlab::DuoWorkflow::Client.debug_mode? ? 'true' : 'false',
DUO_WORKFLOW_GIT_HTTP_BASE_URL: Gitlab.config.gitlab.url,
DUO_WORKFLOW_GIT_HTTP_PASSWORD: @params[:workflow_oauth_token],
DUO_WORKFLOW_GIT_HTTP_USER: "oauth",
GITLAB_BASE_URL: Gitlab.config.gitlab.url
}
end

def commands
[
%(wget #{Gitlab::DuoWorkflow::Executor.executor_binary_url} -O /tmp/duo-workflow-executor.tar.gz),
%(tar xf /tmp/duo-workflow-executor.tar.gz --directory /tmp),
%(chmod +x /tmp/duo-workflow-executor),
%(/tmp/duo-workflow-executor),
%(git add .),
%(git diff --staged),
%(git diff --staged > workflow.diff)
]
end
end
end
end
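
For context, the new Ai::DuoWorkflows::Workload class only builds a CI job definition hash; it does not execute anything itself. A rough sketch of how a caller might consume it, using the parameter keys the class reads above (the user object, goal text, and token values here are placeholders, and the calling code is illustrative rather than the actual GitLab service):

# Illustrative only: shows the shape of the job definition the class produces.
workload = Ai::DuoWorkflows::Workload.new(
  current_user,                                # any User-like object the caller already has
  {
    goal: 'Fix the failing spec',              # surfaces as DUO_WORKFLOW_GOAL
    workflow_id: 42,                           # surfaces as DUO_WORKFLOW_WORKFLOW_ID
    workflow_oauth_token: '<oauth token>',     # placeholder value
    workflow_service_token: '<service token>'  # placeholder value
  }
)

job = workload.job
job[:image]                          # => the IMAGE constant above
job[:artifacts]                      # => { paths: ['workflow.diff'] }
job[:variables][:DUO_WORKFLOW_GOAL]  # => { value: 'Fix the failing spec', expand: false }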

@@ -372,7 +372,7 @@ module API
end
# rubocop: disable CodeReuse/ActiveRecord
post ':id/issues/:issue_iid/move' do
Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/20776', new_threshold: 205)
Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/20776', new_threshold: 250)

issue = user_project.issues.find_by(iid: params[:issue_iid])
not_found!('Issue') unless issue

@@ -0,0 +1,11 @@
# frozen_string_literal: true

module Ci
module Workloads
class Workload
def job
raise "not implemented"
end
end
end
end
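
The base class above is effectively an abstract interface: anything that wants to run as a CI workload implements #job and returns a job definition hash (the Ai::DuoWorkflows::Workload class earlier in this diff is one such subclass). A minimal hypothetical subclass, purely to illustrate the contract:

# Hypothetical example subclass, not part of this change.
module Ci
  module Workloads
    class EchoWorkload < Workload
      def job
        {
          image: 'alpine:latest',
          script: ['echo "hello from a workload"'],
          variables: {}
        }
      end
    end
  end
end

Ci::Workloads::EchoWorkload.new.job   # => the hash above
Ci::Workloads::Workload.new.job       # => raises RuntimeError, "not implemented"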

@@ -20,6 +20,11 @@ module Gitlab
# - scoped_variables_for_pipeline_seed
def scoped_variables(job, environment:, dependencies:)
Gitlab::Ci::Variables::Collection.new.tap do |variables|
if pipeline.disable_all_except_yaml_variables?
variables.concat(job.yaml_variables)
next
end

variables.concat(predefined_variables(job, environment))
variables.concat(project.predefined_variables)
variables.concat(pipeline_variables_builder.predefined_variables)

@@ -39,6 +44,11 @@ module Gitlab

def unprotected_scoped_variables(job, expose_project_variables:, expose_group_variables:, environment:, dependencies:)
Gitlab::Ci::Variables::Collection.new.tap do |variables|
if pipeline.disable_all_except_yaml_variables?
variables.concat(job.yaml_variables)
next
end

variables.concat(predefined_variables(job, environment))
variables.concat(project.predefined_variables)
variables.concat(pipeline_variables_builder.predefined_variables)

@@ -58,6 +68,11 @@ module Gitlab

def scoped_variables_for_pipeline_seed(job_attr, environment:, kubernetes_namespace:, user:, trigger_request:)
Gitlab::Ci::Variables::Collection.new.tap do |variables|
if pipeline.disable_all_except_yaml_variables?
variables.concat(job_attr[:yaml_variables])
next
end

variables.concat(predefined_variables_from_job_attr(job_attr, environment, trigger_request))
variables.concat(project.predefined_variables)
variables.concat(pipeline_variables_builder.predefined_variables)

@@ -78,6 +93,7 @@ module Gitlab
def config_variables
Gitlab::Ci::Variables::Collection.new.tap do |variables|
break variables unless project
next if pipeline.disable_all_except_yaml_variables?

variables.concat(project.predefined_variables)
variables.concat(pipeline_variables_builder.predefined_variables)

@@ -94,6 +94,16 @@ module Gitlab
end

def self.skip_transaction_check?
# When transactional tests are in use, Rails calls
# ConnectionPool#lock_thread= to ensure all application threads
# get the same connection so they can all see the data in the
# uncommited transaction. If Puma is in use, check the state of
# the lock thread.
if ::Rails.env.test?
lock_thread = ::ApplicationRecord.connection_pool.instance_variable_get(:@lock_thread)
return true if lock_thread && lock_thread[:skip_transaction_check_for_exclusive_lease]
end

Thread.current[:skip_transaction_check_for_exclusive_lease]
end
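
The new branch makes the existing thread-local opt-out work under a multi-threaded server during transactional tests: the flag is also looked up on the connection pool's lock thread, not just on the current thread. The ExclusiveLease spec further down exercises this through described_class.skipping_transaction_check; conceptually it behaves like this (a sketch only, assuming skipping_transaction_check simply toggles the thread-local flag around the block):

# Sketch, assuming skipping_transaction_check sets and clears the thread-local flag.
Gitlab::ExclusiveLease.skipping_transaction_check do
  # Here Thread.current[:skip_transaction_check_for_exclusive_lease] is truthy,
  # so skip_transaction_check? returns true on this thread.
  #
  # With the change above, a spawned worker thread that shares the test's
  # lock thread is also exempted, because the lock thread's copy of the
  # flag is consulted as well.
end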

@@ -119,7 +119,9 @@ module Gitlab
# name - The name of the feature flag, e.g. `my_feature`.
# enabled - Boolean to be pushed directly to the frontend. Should be fetched by checking a feature flag.
def push_force_frontend_feature_flag(name, enabled)
push_to_gon_attributes(:features, name, !!enabled)
raise ArgumentError, 'enabled flag must be a Boolean' unless enabled.in?([true, false])

push_to_gon_attributes(:features, name, enabled)
end

def push_namespace_setting(key, object)
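
Previously a nil or other non-Boolean value was coerced with !! and pushed silently; after this change the caller must pass a real Boolean or the helper raises. In short (this mirrors the updated GonHelper spec later in this diff):

# Behaviour after this change:
push_force_frontend_feature_flag(:my_feature_flag, true)   # gon.features['myFeatureFlag'] = true
push_force_frontend_feature_flag(:my_feature_flag, false)  # gon.features['myFeatureFlag'] = false
push_force_frontend_feature_flag(:my_feature_flag, nil)    # raises ArgumentError, 'enabled flag must be a Boolean'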

@@ -19,20 +19,16 @@ module Gitlab
"This merge request cannot be merged, #{base_message}"
end

def push_error(change_size = 0)
"Your push has been rejected, #{base_message(change_size)}. #{more_info_message}"
def push_error
"Your push to this repository cannot be completed #{base_message}. #{more_info_message}"
end

def new_changes_error
if additional_repo_storage_available?
"Your push to this repository has been rejected because it would exceed storage limits. #{more_info_message}"
else
"Your push to this repository would cause it to exceed the size limit of #{formatted(limit)} so it has been rejected. #{more_info_message}"
end
"Your push to this repository cannot be completed as it would exceed the allocated storage for your project. #{more_info_message}"
end

def more_info_message
'Please contact your GitLab administrator for more information.'
'Contact your GitLab administrator for more information.'
end

def above_size_limit_message

@@ -41,8 +37,8 @@ module Gitlab

private

def base_message(change_size = 0)
"because this repository has exceeded its size limit of #{formatted(limit)} by #{formatted(exceeded_size(change_size))}"
def base_message
"because this repository has exceeded the allocated storage for your project"
end

def formatted(number)
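
The net effect of these message changes is that the rejection text no longer reports the configured size limit or the amount by which it was exceeded, and push_error and new_changes_error no longer take a change size. Concatenating the new pieces, the full strings become (as asserted in the RepositorySizeErrorMessage spec further down):

push_error        # => "Your push to this repository cannot be completed because this repository has exceeded the allocated storage for your project. Contact your GitLab administrator for more information."
new_changes_error # => "Your push to this repository cannot be completed as it would exceed the allocated storage for your project. Contact your GitLab administrator for more information."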

@@ -36653,7 +36653,7 @@ msgstr ""
msgid "MlModelRegistry|Create your machine learning using GitLab directly or using the MLflow client"
msgstr ""

msgid "MlModelRegistry|Create/Import"
msgid "MlModelRegistry|Create/Import model"
msgstr ""

msgid "MlModelRegistry|Creating a model"

@@ -36746,6 +36746,9 @@ msgstr ""
msgid "MlModelRegistry|Failed to load model with error: %{message}"
msgstr ""

msgid "MlModelRegistry|Failed to load models with error: %{message}"
msgstr ""

msgid "MlModelRegistry|File \"%{name}\" is %{size}. It is larger than max allowed size of %{maxAllowedFileSize}"
msgstr ""

@@ -36911,9 +36914,6 @@ msgstr ""
msgid "MlModelRegistry|Subfolder cannot contain spaces"
msgstr ""

msgid "MlModelRegistry|This model has no runs"
msgstr ""

msgid "MlModelRegistry|Total versions"
msgstr ""

@@ -36953,6 +36953,9 @@ msgstr ""
msgid "MlModelRegistry|folder name"
msgstr ""

msgid "MlModelRegistry|version"
msgstr ""

msgid "Mock an external CI integration."
msgstr ""

@@ -46,7 +46,7 @@ module Gitlab
},
kas: {
minReplicas: 1,
resources: resources("10m", "45Mi")
resources: resources("100m", "256Mi")
},
gitlab_shell: {
minReplicas: 1,

@@ -84,7 +84,7 @@ module Gitlab
},
kas: {
minReplicas: 1,
resources: resources("40m", "64Mi"),
resources: resources("100m", "256Mi"),
hpa: cpu_utilization
},
gitlab_shell: {

@@ -145,7 +145,7 @@ module Gitlab
#
# @return [Boolean]
def cluster_exists?
execute_shell(%w[kind get clusters]).include?(name)
execute_shell(%w[kind get clusters]).split("\n").any? { |line| line.strip == name }
end

# Create kind config file and return it's path
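
The old check used String#include?, which does substring matching against the raw `kind get clusters` output, so a cluster whose name merely contains the target name would count as a match. Splitting the output into lines and comparing each stripped name for equality avoids the false positive. A small illustration (the cluster names are made up):

output = "gitlab-ci\nother\n"

output.include?('gitlab')                                  # => true  (false positive)
output.split("\n").any? { |line| line.strip == 'gitlab' }  # => false (exact match only)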

@@ -26,8 +26,8 @@ RSpec.describe Gitlab::Cng::Deployment::ResourcePresets do
kas: {
minReplicas: 1,
resources: {
requests: { cpu: "10m", memory: "45Mi" },
limits: { cpu: "10m", memory: "45Mi" }
requests: { cpu: "100m", memory: "256Mi" },
limits: { cpu: "100m", memory: "256Mi" }
}
},
gitlab_shell: {

@@ -100,8 +100,8 @@ RSpec.describe Gitlab::Cng::Deployment::ResourcePresets do
kas: {
minReplicas: 1,
resources: {
requests: { cpu: "40m", memory: "64Mi" },
limits: { cpu: "40m", memory: "64Mi" }
requests: { cpu: "100m", memory: "256Mi" },
limits: { cpu: "100m", memory: "256Mi" }
},
hpa: {
cpu: {

@@ -188,7 +188,6 @@ spec/frontend/ml/model_registry/components/candidate_detail_spec.js
spec/frontend/ml/model_registry/components/model_edit_spec.js
spec/frontend/ml/model_registry/components/model_version_create_spec.js
spec/frontend/notebook/cells/markdown_spec.js
spec/frontend/notebook/cells/output/html_spec.js
spec/frontend/notes/components/discussion_notes_spec.js
spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
spec/frontend/packages_and_registries/dependency_proxy/app_spec.js

@@ -13,10 +13,11 @@ describe('DiffsFileTree', () => {

let wrapper;

const createComponent = ({ visible = true } = {}) => {
const createComponent = ({ visible = true, ...rest } = {}) => {
wrapper = shallowMount(DiffsFileTree, {
propsData: {
visible,
...rest,
},
});
};

@@ -131,4 +132,10 @@ describe('DiffsFileTree', () => {
});
});
});

it('passes down loadedFiles table to tree list', () => {
const loadedFiles = { foo: true };
createComponent({ loadedFiles });
expect(wrapper.findComponent(TreeList).props('loadedFiles')).toBe(loadedFiles);
});
});

@@ -23,10 +23,10 @@ describe('Diffs tree list component', () => {

Vue.use(Vuex);

const createComponent = ({ hideFileStats = false } = {}) => {
const createComponent = ({ hideFileStats = false, ...rest } = {}) => {
wrapper = shallowMountExtended(TreeList, {
store,
propsData: { hideFileStats },
propsData: { hideFileStats, ...rest },
stubs: {
// eslint will fail if we import the real component
RecycleScroller: stubComponent(

@@ -37,7 +37,8 @@ describe('Diffs tree list component', () => {
},
},
{
template: '<div><slot :item="{ tree: [] }"></slot></div>',
template:
'<div><template v-for="item in items"><slot :item="item"></slot></template></div>',
},
),
},

@@ -423,4 +424,37 @@ describe('Diffs tree list component', () => {
},
);
});

describe('loading state', () => {
const getLoadedFiles = (offset = 1) =>
store.state.diffs.tree.slice(offset).reduce((acc, el) => {
acc[el.fileHash] = true;
return acc;
}, {});

beforeEach(() => {
setupFilesInState();
});

it('sets loading state for loading files', () => {
const loadedFiles = getLoadedFiles();
createComponent({ loadedFiles });
const [firstItem, secondItem] = getScroller().props('items');
expect(firstItem.loading).toBe(true);
expect(secondItem.loading).toBe(false);
});

it('is not focusable', () => {
const loadedFiles = getLoadedFiles();
createComponent({ loadedFiles });
expect(wrapper.findAllComponents(DiffFileRow).at(0).attributes('tabindex')).toBe('-1');
});

it('ignores clicks on loading files', () => {
const loadedFiles = getLoadedFiles();
createComponent({ loadedFiles });
wrapper.findAllComponents(DiffFileRow).at(0).vm.$emit('clickFile', {});
expect(wrapper.emitted('clickFile')).toBe(undefined);
});
});
});

@@ -1,12 +1,12 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import App from '~/import/fogbugz/app.vue';
import ImportFromFogbugzApp from '~/import/fogbugz/import_from_fogbugz_app.vue';
import MultiStepFormTemplate from '~/vue_shared/components/multi_step_form_template.vue';

describe('Import from FugBugz app', () => {
let wrapper;

const createComponent = () => {
wrapper = shallowMountExtended(App, {
wrapper = shallowMountExtended(ImportFromFogbugzApp, {
propsData: {
backButtonPath: '/projects/new#import_project',
formPath: '/import/fogbugz/callback',

@@ -1,7 +1,7 @@
import { nextTick } from 'vue';
import { GlFormInput } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ImportFromGiteaRoot from '~/import/gitea/import_from_gitea_root.vue';
import ImportFromGiteaApp from '~/import/gitea/import_from_gitea_app.vue';
import MultiStepFormTemplate from '~/vue_shared/components/multi_step_form_template.vue';

jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));

@@ -16,7 +16,7 @@ describe('Import from Gitea app', () => {
};

const createComponent = () => {
wrapper = shallowMountExtended(ImportFromGiteaRoot, {
wrapper = shallowMountExtended(ImportFromGiteaApp, {
propsData: {
...defaultProps,
},

@@ -130,7 +130,7 @@ describe('ml/model_registry/apps/index_ml_models', () => {

it('error message is displayed', () => {
expect(findSearchableTable().props('errorMessage')).toBe(
'Failed to load model with error: Failure!',
'Failed to load models with error: Failure!',
);
});

@@ -135,7 +135,7 @@ describe('ml/model_registry/apps/show_model_version.vue', () => {
it('renders the title', () => {
createWrapper();

expect(findTitleArea().props('title')).toBe('blah / 1.2.3');
expect(findTitleArea().props('title')).toBe('blah / version 1.2.3');
});

describe('Model version edit button', () => {

@@ -66,7 +66,7 @@ export default [
'\n',
'</style>',
].join(''),
output: '<!---->',
output: '<style type="text/css">',
},
],
// Note: iframe is sanitized out

@@ -84,7 +84,7 @@ export default [
' ></iframe>\n',
' ',
].join(''),
output: '<!---->',
output: 'src="https://www.youtube.com/embed/sjfsUzECqK0"',
},
],
[

@@ -70,6 +70,7 @@ describe('system note component', () => {
'issues',
'error',
'review-warning',
'comment-lines',
];
createComponent(props);

@@ -0,0 +1,12 @@
# frozen_string_literal: true

require 'spec_helper'
RSpec.describe Ci::Workloads::Workload, feature_category: :continuous_integration do
subject(:workload) { described_class.new }

describe '#job' do
it 'needs to be implemented' do
expect { workload.job }.to raise_error(RuntimeError, "not implemented")
end
end
end

@@ -313,6 +313,41 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
end
end
end

context 'when pipeline disables all except yaml variables' do
before do
pipeline.source = :duo_workflow
end

it 'only includes the YAML defined variables' do
expect(subject).to contain_exactly(
Gitlab::Ci::Variables::Collection::Item.fabricate({ key: 'YAML_VARIABLE', value: 'value' })
)
end
end
end

describe '#unprotected_scoped_variables' do
let(:expose_project_variables) { true }
let(:expose_group_variables) { true }
let(:environment_name) { job.expanded_environment_name }
let(:dependencies) { true }

subject { builder.unprotected_scoped_variables(job, expose_project_variables: expose_project_variables, expose_group_variables: expose_group_variables, environment: environment_name, dependencies: dependencies) }

it { is_expected.to be_instance_of(Gitlab::Ci::Variables::Collection) }

context 'when pipeline disables all except yaml variables' do
before do
pipeline.source = :duo_workflow
end

it 'only includes the YAML defined variables' do
expect(subject).to contain_exactly(
Gitlab::Ci::Variables::Collection::Item.fabricate({ key: 'YAML_VARIABLE', value: 'value' })
)
end
end
end

describe '#scoped_variables_for_pipeline_seed' do

@@ -558,6 +593,18 @@ RSpec.describe Gitlab::Ci::Variables::Builder, :clean_gitlab_redis_cache, featur
end
end
end

context 'when pipeline disables all except yaml variables' do
before do
pipeline.source = :duo_workflow
end

it 'only includes the YAML defined variables' do
expect(subject).to contain_exactly(
Gitlab::Ci::Variables::Collection::Item.fabricate({ key: 'YAML_VARIABLE', value: 'value' })
)
end
end
end

describe '#user_variables' do

@@ -42,6 +42,20 @@ RSpec.describe Gitlab::ExclusiveLease, :request_store,
expect { lease_attempt }.to raise_error(Gitlab::ExclusiveLease::LeaseWithinTransactionError)
end
end

it 'allows the operation if lock thread is set' do
described_class.skipping_transaction_check do
thread = Thread.new do
Thread.current.abort_on_exception = true

ApplicationRecord.transaction do
expect { lease_attempt }.not_to raise_error
end
end

thread.join
end
end
end

context 'in production environment' do

@@ -335,7 +335,7 @@ RSpec.describe Gitlab::GitAccessSnippet do

expect do
push_access_check
end.to raise_error(described_class::ForbiddenError, /Your push has been rejected/)
end.to raise_error(described_class::ForbiddenError, /Your push to this repository cannot be completed/)
end

it_behaves_like 'migration bot does not err'

@@ -365,7 +365,7 @@ RSpec.describe Gitlab::GitAccessSnippet do

expect do
push_access_check
end.to raise_error(described_class::ForbiddenError, /Your push to this repository would cause it to exceed the size limit/)
end.to raise_error(described_class::ForbiddenError, /Your push to this repository cannot be completed as it would exceed the allocated storage for your project/)
end

it_behaves_like 'migration bot does not err'

@@ -169,7 +169,7 @@ RSpec.describe Gitlab::GonHelper, feature_category: :shared do
.and_return(gon)
end

it 'pushes a feature flag to the frontend with the provided value' do
it 'pushes a feature flag to the frontend with a true value' do
expect(gon)
.to receive(:push)
.with({ features: { 'myFeatureFlag' => true } }, true)

@@ -177,12 +177,16 @@ RSpec.describe Gitlab::GonHelper, feature_category: :shared do
helper.push_force_frontend_feature_flag(:my_feature_flag, true)
end

it 'pushes a disabled feature flag if provided value is nil' do
it 'pushes a feature flag to the frontend with a false value' do
expect(gon)
.to receive(:push)
.with({ features: { 'myFeatureFlag' => false } }, true)

helper.push_force_frontend_feature_flag(:my_feature_flag, nil)
helper.push_force_frontend_feature_flag(:my_feature_flag, false)
end

it 'raises an ArgumentError if argument is not a boolean' do
expect { helper.push_force_frontend_feature_flag(:my_feature_flag, Object.new) }.to raise_error ArgumentError
end
end

@@ -14,7 +14,7 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
end

let(:message) { checker.error_message }
let(:base_message) { 'because this repository has exceeded its size limit of 10 MiB by 5 MiB' }
let(:base_message) { 'because this repository has exceeded the allocated storage for your project' }

before do
allow(namespace).to receive(:total_repository_size_excess).and_return(0)

@@ -36,19 +36,19 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
describe '#push_error' do
context 'with exceeded_limit value' do
let(:rejection_message) do
'because this repository has exceeded its size limit of 10 MiB by 15 MiB'
'because this repository has exceeded the allocated storage for your project'
end

it 'returns the correct message' do
expect(message.push_error(10.megabytes))
.to eq("Your push has been rejected, #{rejection_message}. #{message.more_info_message}")
expect(message.push_error)
.to eq("Your push to this repository cannot be completed #{rejection_message}. #{message.more_info_message}")
end
end

context 'without exceeded_limit value' do
it 'returns the correct message' do
expect(message.push_error)
.to eq("Your push has been rejected, #{base_message}. #{message.more_info_message}")
.to eq("Your push to this repository cannot be completed #{base_message}. #{message.more_info_message}")
end
end
end

@@ -58,13 +58,7 @@ RSpec.describe Gitlab::RepositorySizeErrorMessage do
it 'returns the correct message' do
allow(checker).to receive(:additional_repo_storage_available?).and_return(true)

expect(message.new_changes_error).to eq('Your push to this repository has been rejected because it would exceed storage limits. Please contact your GitLab administrator for more information.')
end
end

context 'when no additional repo storage is available' do
it 'returns the correct message' do
expect(message.new_changes_error).to eq("Your push to this repository would cause it to exceed the size limit of 10 MiB so it has been rejected. #{message.more_info_message}")
expect(message.new_changes_error).to eq('Your push to this repository cannot be completed as it would exceed the allocated storage for your project. Contact your GitLab administrator for more information.')
end
end
end

@@ -3,6 +3,8 @@
require 'spec_helper'

RSpec.describe BulkImport, type: :model, feature_category: :importers do
using RSpec::Parameterized::TableSyntax

let_it_be(:created_bulk_import) { create(:bulk_import, :created, updated_at: 2.hours.ago) }
let_it_be(:started_bulk_import) { create(:bulk_import, :started, updated_at: 3.hours.ago) }
let_it_be(:finished_bulk_import) { create(:bulk_import, :finished, updated_at: 1.hour.ago) }

@@ -228,6 +230,34 @@ RSpec.describe BulkImport, type: :model, feature_category: :importers do
end
end

describe '#source_equals_destination?' do
subject(:bulk_import) do
build_stubbed(:bulk_import,
configuration: build_stubbed(:bulk_import_configuration, url: source_url)
)
end

before do
allow(Settings.gitlab).to receive(:host).and_return('gitlab.example')
end

where(:source_url, :value) do
'https://gitlab.example' | true
'https://gitlab.example:443' | true
'https://gitlab.example/' | true
'https://gitlab.example/dir' | true
'http://gitlab.example' | true
'https://gitlab.example2' | false
'https://subdomain.example' | false
'https://subdomain.gitlab.example' | false
'http://192.168.1.1' | false
end

with_them do
it { expect(bulk_import.source_equals_destination?).to eq(value) }
end
end
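
The table above pins down that only the host portion of the configured source URL is compared against this instance's host: scheme, port, path, and trailing slash are ignored, while different hosts and subdomains do not match. A minimal sketch of that comparison (an illustration of the behaviour the spec table describes, not the actual model code):

require 'uri'

# Illustrative host comparison matching the spec table above.
def source_equals_destination?(source_url, destination_host)
  URI.parse(source_url).host == destination_host
end

source_equals_destination?('https://gitlab.example:443/dir', 'gitlab.example')   # => true
source_equals_destination?('https://subdomain.gitlab.example', 'gitlab.example') # => false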

describe '#namespaces_with_unassigned_placeholders' do
let_it_be(:group) { create(:group) }
let_it_be(:entity) do

@@ -3,7 +3,7 @@
require 'spec_helper'

RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers do
subject { described_class.new(group: Group.new) }
subject { build(:bulk_import_entity, group: Group.new) }

describe 'associations' do
it { is_expected.to belong_to(:bulk_import).required }

@@ -173,36 +173,64 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
end

context 'validate destination namespace of a group_entity' do
it 'is invalid if destination namespace is the source namespace' do
group_a = create(:group, path: 'group_a')

entity = build(
:bulk_import_entity,
:group_entity,
source_full_path: group_a.full_path,
destination_namespace: group_a.full_path
)

expect(entity).not_to be_valid
expect(entity.errors).to include(:base)
expect(entity.errors[:base])
.to include('Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
let_it_be(:bulk_import) do
create(:bulk_import, configuration: create(:bulk_import_configuration, url: 'http://example.gitlab.com'))
end

it 'is invalid if destination namespace is a descendant of the source' do
group_a = create(:group, path: 'group_a')
group_b = create(:group, parent: group_a, path: 'group_b')
context 'when source instance and destination instance are the same' do
before do
allow(Settings.gitlab).to receive(:host).and_return('example.gitlab.com')
end

entity = build(
:bulk_import_entity,
:group_entity,
source_full_path: group_a.full_path,
destination_namespace: group_b.full_path
)
it 'is invalid if destination namespace is the source namespace' do
group_a = create(:group, path: 'group_a')

expect(entity).not_to be_valid
expect(entity.errors[:base])
.to include('Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
entity = build(
:bulk_import_entity,
:group_entity,
source_full_path: group_a.full_path,
destination_namespace: group_a.full_path,
bulk_import: bulk_import
)

expect(entity).not_to be_valid
expect(entity.errors).to include(:base)
expect(entity.errors[:base])
.to include('Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
end

it 'is invalid if destination namespace is a descendant of the source' do
group_a = create(:group, path: 'group_a')
group_b = create(:group, parent: group_a, path: 'group_b')

entity = build(
:bulk_import_entity,
:group_entity,
source_full_path: group_a.full_path,
destination_namespace: group_b.full_path,
bulk_import: bulk_import
)

expect(entity).not_to be_valid
expect(entity.errors[:base])
.to include('Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
end
end

context 'when source instance and destination instance are not the same' do
it 'is valid if destination namespace is the source namespace' do
group_a = create(:group, path: 'group_a')

entity = build(
:bulk_import_entity,
:group_entity,
source_full_path: group_a.full_path,
destination_namespace: group_a.full_path,
bulk_import: bulk_import
)

expect(entity).to be_valid
end
end
end
Some files were not shown because too many files have changed in this diff.