Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-05-08 21:15:10 +00:00
parent 3a563d7c1e
commit 7db94a9807
107 changed files with 2110 additions and 485 deletions

View File

@ -14,17 +14,14 @@
## Moving docs to a new location?
Read the guidelines:
https://docs.gitlab.com/ee/development/documentation/index.html#move-or-rename-a-page
Read the [redirect guidelines](https://docs.gitlab.com/ee/development/documentation/redirects.html) first.
- [ ] Make sure the old link is not removed and has its contents replaced with
a link to the new location.
- [ ] Make sure internal links pointing to the document in question are not broken.
- [ ] Search and replace any links referring to old docs in GitLab Rails app,
specifically under the `app/views/` and `ee/app/views` (for GitLab EE) directories.
- [ ] Make sure to add [`redirect_from`](https://docs.gitlab.com/ee/development/documentation/index.html#redirections-for-pages-with-disqus-comments)
to the new document if there are any Disqus comments on the old document thread.
- [ ] Update the link in `features.yml` (if applicable).
- [ ] Update the link in [`features.yml`](https://gitlab.com/gitlab-com/www-gitlab-com/-/blob/master/data/features.yml) (if applicable).
- [ ] Assign one of the technical writers for review.
/label ~documentation ~"Technical Writing"
/label ~documentation ~"Technical Writing" ~"type::maintenance" ~"maintenance::refactor"

View File

@ -1 +1 @@
4.3.0
4.3.1

View File

@ -587,6 +587,9 @@ gem 'cvss-suite', '~> 3.0.1', require: 'cvss_suite'
# Work with RPM packages
gem 'arr-pm', '~> 0.0.12'
# Remote Development
gem 'devfile', '~> 0.0.17.pre.alpha1'
# Apple plist parsing
gem 'CFPropertyList', '~> 3.0.0'
gem 'app_store_connect'

View File

@ -109,6 +109,9 @@
{"name":"deprecation_toolkit","version":"1.5.1","platform":"ruby","checksum":"a8a1ab1a19ae40ea12560b65010e099f3459ebde390b76621ef0c21c516a04ba"},
{"name":"derailed_benchmarks","version":"2.1.2","platform":"ruby","checksum":"eaadc6206ceeb5538ff8f5e04a0023d54ebdd95d04f33e8960fb95a5f189a14f"},
{"name":"descendants_tracker","version":"0.0.4","platform":"ruby","checksum":"e9c41dd4cfbb85829a9301ea7e7c48c2a03b26f09319db230e6479ccdc780897"},
{"name":"devfile","version":"0.0.17.pre.alpha1","platform":"arm64-darwin","checksum":"a6e4d970914399a3acce38d81c42ba5b98f849d878031ff83decd6575369d0c3"},
{"name":"devfile","version":"0.0.17.pre.alpha1","platform":"ruby","checksum":"2855e7513ab8322e456d3080bf2449109cf4a5785e262443128db0ebf48e646c"},
{"name":"devfile","version":"0.0.17.pre.alpha1","platform":"x86_64-linux","checksum":"da045e7cbeb2f0685b9b6c7f3d54147403720dced01f727e2f8ca53cef333eaa"},
{"name":"device_detector","version":"1.0.0","platform":"ruby","checksum":"b800fb3150b00c23e87b6768011808ac1771fffaae74c3238ebaf2b782947a7d"},
{"name":"devise","version":"4.8.1","platform":"ruby","checksum":"fdd48bbe79a89e7c1152236a70479842ede48bea4fa7f4f2d8da1f872559803e"},
{"name":"devise-two-factor","version":"4.0.2","platform":"ruby","checksum":"6548d2696ed090d27046f888f4fa7380f151e0f823902d46fd9b91e7d0cac511"},
@ -211,7 +214,7 @@
{"name":"gitlab-experiment","version":"0.7.1","platform":"ruby","checksum":"166dddb3aa83428bcaa93c35684ed01dc4d61f321fd2ae40b020806dc54a7824"},
{"name":"gitlab-fog-azure-rm","version":"1.7.0","platform":"ruby","checksum":"969c67943c54ad4c259a6acd040493f13922fbdf2211bb4eca00e71505263dc2"},
{"name":"gitlab-labkit","version":"0.31.1","platform":"ruby","checksum":"3e3a39370966b5d2739c2d9d9005c0ea27541d32cb7292e856e8bd74c720bffb"},
{"name":"gitlab-license","version":"2.2.1","platform":"ruby","checksum":"39fcf6be8b2887df8afe01b5dcbae8d08b7c5d937ff56b0fb40484a8c4f02d30"},
{"name":"gitlab-license","version":"2.2.2","platform":"ruby","checksum":"2ccbc763828d013524b0b3b9ee671e58d5277693e5ffb2e5463cbac87e8aed1e"},
{"name":"gitlab-mail_room","version":"0.0.23","platform":"ruby","checksum":"23564fa4dab24ec5011d4c64a801fc0228301d5b0f046a26a1d8e96e36c19997"},
{"name":"gitlab-markup","version":"1.9.0","platform":"ruby","checksum":"7eda045a08ec2d110084252fa13a8c9eac8bdac0e302035ca7db4b82bcbd7ed4"},
{"name":"gitlab-net-dns","version":"0.9.2","platform":"ruby","checksum":"f726d978479d43810819f12a45c0906d775a07e34df111bbe693fffbbef3059d"},

View File

@ -375,6 +375,7 @@ GEM
thor (>= 0.19, < 2)
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
devfile (0.0.17.pre.alpha1)
device_detector (1.0.0)
devise (4.8.1)
bcrypt (~> 3.0)
@ -617,7 +618,7 @@ GEM
opentracing (~> 0.4)
pg_query (~> 2.1)
redis (> 3.0.0, < 6.0.0)
gitlab-license (2.2.1)
gitlab-license (2.2.2)
gitlab-mail_room (0.0.23)
jwt (>= 2.0)
net-imap (>= 0.2.1)
@ -1713,6 +1714,7 @@ DEPENDENCIES
declarative_policy (~> 1.1.0)
deprecation_toolkit (~> 1.5.1)
derailed_benchmarks
devfile (~> 0.0.17.pre.alpha1)
device_detector
devise (~> 4.8.1)
devise-pbkdf2-encryptable (~> 0.0.0)!

View File

@ -9,7 +9,7 @@ import PathNavigation from '~/analytics/cycle_analytics/components/path_navigati
import StageTable from '~/analytics/cycle_analytics/components/stage_table.vue';
import ValueStreamFilters from '~/analytics/cycle_analytics/components/value_stream_filters.vue';
import UrlSync from '~/vue_shared/components/url_sync.vue';
import { __ } from '~/locale';
import { __, s__ } from '~/locale';
import { SUMMARY_METRICS_REQUEST, METRICS_REQUESTS } from '../constants';
const OVERVIEW_DIALOG_COOKIE = 'cycle_analytics_help_dismissed';
@ -79,7 +79,9 @@ export default {
}
return this.selectedStageError
? this.selectedStageError
: __("We don't have enough data to show this stage.");
: s__(
'ValueStreamAnalyticsStage|There are 0 items to show in this stage, for these filters, within this time range.',
);
},
emptyStageText() {
if (this.displayNoAccess) {

View File

@ -14,7 +14,7 @@ export const DEFAULT_VALUE_STREAM = {
};
export const NOT_ENOUGH_DATA_ERROR = s__(
"ValueStreamAnalyticsStage|We don't have enough data to show this stage.",
'ValueStreamAnalyticsStage|There are 0 items to show in this stage, for these filters, within this time range.',
);
export const PAGINATION_TYPE = 'keyset';

View File

@ -38,6 +38,14 @@ const VSA_FLOW_METRICS_GROUP = {
export const VSA_METRICS_GROUPS = [VSA_FLOW_METRICS_GROUP];
export const VULNERABILITY_CRITICAL_TYPE = 'vulnerability_critical';
export const VULNERABILITY_HIGH_TYPE = 'vulnerability_high';
export const VULNERABILITY_METRICS = {
CRITICAL: VULNERABILITY_CRITICAL_TYPE,
HIGH: VULNERABILITY_HIGH_TYPE,
};
export const METRIC_TOOLTIPS = {
[DORA_METRICS.DEPLOYMENT_FREQUENCY]: {
description: s__(
@ -101,6 +109,18 @@ export const METRIC_TOOLTIPS = {
projectLink: '-/analytics/merge_request_analytics',
docsLink: helpPagePath('user/analytics/merge_request_analytics'),
},
[VULNERABILITY_METRICS.CRITICAL]: {
description: s__('ValueStreamAnalytics|Total Critical vulnerabilities.'),
groupLink: '-/security/vulnerabilities',
projectLink: '-/security/vulnerability_report',
docsLink: helpPagePath('user/application_security/vulnerability_report/index'),
},
[VULNERABILITY_METRICS.HIGH]: {
description: s__('ValueStreamAnalytics|Total High vulnerabilities.'),
groupLink: '-/security/vulnerabilities',
projectLink: '-/security/vulnerability_report',
docsLink: helpPagePath('user/application_security/vulnerability_report/index'),
},
};
// TODO: Remove this once the migration to METRIC_TOOLTIPS is complete

View File

@ -18,12 +18,8 @@ export default {
variables() {
return { projectPath: this.projectPath };
},
update({
project: {
statistics: { buildArtifactsSize },
},
}) {
return buildArtifactsSize;
update({ project: { statistics } }) {
return statistics?.buildArtifactsSize ?? null;
},
},
},

View File

@ -108,6 +108,7 @@ export default {
@cluster-error="onClusterError" />
<kubernetes-tabs
:configuration="k8sAccessConfiguration"
:namespace="namespace"
class="gl-mb-5"
@cluster-error="onClusterError"
/></template>

View File

@ -0,0 +1,180 @@
<script>
import { GlTab, GlLoadingIcon, GlBadge } from '@gitlab/ui';
import { s__ } from '~/locale';
import k8sWorkloadsQuery from '../graphql/queries/k8s_workloads.query.graphql';
import {
  getDeploymentsStatuses,
  getDaemonSetStatuses,
  getStatefulSetStatuses,
  getReplicaSetStatuses,
  getJobsStatuses,
  getCronJobsStatuses,
} from '../helpers/k8s_integration_helper';

// "Summary" tab listing every Kubernetes workload kind (Deployments,
// DaemonSets, StatefulSets, ReplicaSets, Jobs, CronJobs) with a status
// badge count per kind. Data comes from the client-side k8sWorkloads
// GraphQL query, resolved against the cluster API.
export default {
  components: {
    GlTab,
    GlBadge,
    GlLoadingIcon,
  },
  apollo: {
    k8sWorkloads: {
      query: k8sWorkloadsQuery,
      variables() {
        return {
          configuration: this.configuration,
          namespace: this.namespace,
        };
      },
      update(data) {
        // Default to an empty object so the computed props below can use
        // optional chaining without special-casing the unloaded state.
        return data?.k8sWorkloads || {};
      },
      error(error) {
        // Surface query failures to the parent, which owns error display.
        this.$emit('cluster-error', error);
      },
    },
  },
  props: {
    // Kubernetes API access configuration passed through to the query.
    configuration: {
      required: true,
      type: Object,
    },
    // Namespace to scope the workload lists to (empty string means all).
    namespace: {
      required: true,
      type: String,
    },
  },
  computed: {
    summaryLoading() {
      return this.$apollo.queries.k8sWorkloads.loading;
    },
    // Total number of workload items across all kinds, shown in the tab badge.
    summaryCount() {
      return this.k8sWorkloads ? Object.values(this.k8sWorkloads).flat().length : 0;
    },
    // One entry per workload kind that has at least one item; kinds with no
    // items return null from their computed prop and are filtered out here.
    summaryObjects() {
      return [
        this.deploymentsItems,
        this.daemonSetsItems,
        this.statefulSetItems,
        this.replicaSetItems,
        this.jobItems,
        this.cronJobItems,
      ].filter(Boolean);
    },
    // Each *Items computed below maps one list kind to a display name plus
    // its items grouped by status (e.g. { ready: [...], failed: [...] }).
    deploymentsItems() {
      const items = this.k8sWorkloads?.DeploymentList;
      if (!items?.length) {
        return null;
      }
      return {
        name: this.$options.i18n.deployments,
        items: getDeploymentsStatuses(items),
      };
    },
    daemonSetsItems() {
      const items = this.k8sWorkloads?.DaemonSetList;
      if (!items?.length) {
        return null;
      }
      return {
        name: this.$options.i18n.daemonSets,
        items: getDaemonSetStatuses(items),
      };
    },
    statefulSetItems() {
      const items = this.k8sWorkloads?.StatefulSetList;
      if (!items?.length) {
        return null;
      }
      return {
        name: this.$options.i18n.statefulSets,
        items: getStatefulSetStatuses(items),
      };
    },
    replicaSetItems() {
      const items = this.k8sWorkloads?.ReplicaSetList;
      if (!items?.length) {
        return null;
      }
      return {
        name: this.$options.i18n.replicaSets,
        items: getReplicaSetStatuses(items),
      };
    },
    jobItems() {
      const items = this.k8sWorkloads?.JobList;
      if (!items?.length) {
        return null;
      }
      return {
        name: this.$options.i18n.jobs,
        items: getJobsStatuses(items),
      };
    },
    cronJobItems() {
      const items = this.k8sWorkloads?.CronJobList;
      if (!items?.length) {
        return null;
      }
      return {
        name: this.$options.i18n.cronJobs,
        items: getCronJobsStatuses(items),
      };
    },
  },
  i18n: {
    summaryTitle: s__('Environment|Summary'),
    deployments: s__('Environment|Deployments'),
    daemonSets: s__('Environment|DaemonSets'),
    statefulSets: s__('Environment|StatefulSets'),
    replicaSets: s__('Environment|ReplicaSets'),
    jobs: s__('Environment|Jobs'),
    cronJobs: s__('Environment|CronJobs'),
  },
  // Badge colors keyed by the status-group names produced by the
  // k8s_integration_helper getters (ready/completed/failed/suspended).
  badgeVariants: {
    ready: 'success',
    completed: 'success',
    failed: 'danger',
    suspended: 'neutral',
  },
  // NOTE(review): `icons` is not referenced anywhere in this template —
  // confirm whether it is dead code or reserved for an upcoming change.
  icons: {
    Active: { icon: 'status_success', class: 'gl-text-green-500' },
  },
};
</script>
<template>
  <gl-tab>
    <template #title>
      {{ $options.i18n.summaryTitle }}
      <gl-badge size="sm" class="gl-tab-counter-badge">{{ summaryCount }}</gl-badge>
    </template>
    <gl-loading-icon v-if="summaryLoading" />
    <!-- One row per workload kind, with a count badge per status group -->
    <ul v-else class="gl-mt-3 gl-list-style-none gl-bg-white gl-pl-0 gl-mb-0">
      <li
        v-for="object in summaryObjects"
        :key="object.name"
        class="gl-display-flex gl-align-items-center gl-p-3 gl-border-t gl-text-gray-700"
        data-testid="summary-list-item"
      >
        <div class="gl-flex-grow-1">{{ object.name }}</div>
        <gl-badge
          v-for="(item, key) in object.items"
          :key="key"
          :variant="$options.badgeVariants[key]"
          size="sm"
          class="gl-ml-2"
          >{{ item.length }} {{ key }}</gl-badge
        >
      </li>
    </ul>
  </gl-tab>
</template>

View File

@ -4,6 +4,7 @@ import { __, s__ } from '~/locale';
import k8sServicesQuery from '../graphql/queries/k8s_services.query.graphql';
import { generateServicePortsString, getServiceAge } from '../helpers/k8s_integration_helper';
import { SERVICES_LIMIT_PER_PAGE } from '../constants';
import KubernetesSummary from './kubernetes_summary.vue';
const tableHeadingClasses = 'gl-bg-gray-50! gl-font-weight-bold gl-white-space-nowrap';
@ -15,6 +16,7 @@ export default {
GlTable,
GlPagination,
GlLoadingIcon,
KubernetesSummary,
},
apollo: {
k8sServices: {
@ -37,6 +39,10 @@ export default {
required: true,
type: Object,
},
namespace: {
required: true,
type: String,
},
},
data() {
return {
@ -128,6 +134,8 @@ export default {
</script>
<template>
<gl-tabs>
<kubernetes-summary :namespace="namespace" :configuration="configuration" />
<gl-tab>
<template #title>
{{ $options.i18n.servicesTitle }}

View File

@ -7,6 +7,7 @@ import environmentToRollbackQuery from './queries/environment_to_rollback.query.
import environmentToStopQuery from './queries/environment_to_stop.query.graphql';
import k8sPodsQuery from './queries/k8s_pods.query.graphql';
import k8sServicesQuery from './queries/k8s_services.query.graphql';
import k8sWorkloadsQuery from './queries/k8s_workloads.query.graphql';
import { resolvers } from './resolvers';
import typeDefs from './typedefs.graphql';
@ -109,6 +110,57 @@ export const apolloProvider = (endpoint) => {
},
},
});
cache.writeQuery({
query: k8sWorkloadsQuery,
data: {
DeploymentList: {
status: {
conditions: [],
},
},
DaemonSetList: {
status: {
numberMisscheduled: 0,
numberReady: 0,
desiredNumberScheduled: 0,
},
},
StatefulSetList: {
status: {
readyReplicas: 0,
},
spec: {
replicas: 0,
},
},
ReplicaSetList: {
status: {
readyReplicas: 0,
},
spec: {
replicas: 0,
},
},
JobList: {
status: {
failed: 0,
succeeded: 0,
},
spec: {
completions: 0,
},
},
CronJobList: {
status: {
active: 0,
lastScheduleTime: '',
},
spec: {
suspend: false,
},
},
},
});
return new VueApollo({
defaultClient,
});

View File

@ -0,0 +1,50 @@
# Client-side (@client) query resolved locally against the Kubernetes
# Apps/Batch APIs. Returns, per workload kind, only the status/spec fields
# needed by the summary helpers to classify items as ready/failed/etc.
query getK8sWorkloads($configuration: LocalConfiguration, $namespace: String) {
  k8sWorkloads(configuration: $configuration, namespace: $namespace) @client {
    # Deployment health is derived from the status conditions list.
    DeploymentList {
      status {
        conditions
      }
    }
    # DaemonSet health compares ready vs. desired scheduled pods.
    DaemonSetList {
      status {
        numberMisscheduled
        numberReady
        desiredNumberScheduled
      }
    }
    # StatefulSets and ReplicaSets compare readyReplicas vs. spec replicas.
    StatefulSetList {
      status {
        readyReplicas
      }
      spec {
        replicas
      }
    }
    ReplicaSetList {
      status {
        readyReplicas
      }
      spec {
        replicas
      }
    }
    # Jobs compare succeeded count vs. requested completions.
    JobList {
      status {
        failed
        succeeded
      }
      spec {
        completions
      }
    }
    # CronJobs use active/lastScheduleTime plus the suspend flag.
    CronJobList {
      status {
        active
        lastScheduleTime
      }
      spec {
        suspend
      }
    }
  }
}

View File

@ -1,4 +1,4 @@
import { CoreV1Api, Configuration } from '@gitlab/cluster-client';
import { CoreV1Api, Configuration, AppsV1Api, BatchV1Api } from '@gitlab/cluster-client';
import axios from '~/lib/utils/axios_utils';
import { s__ } from '~/locale';
import {
@ -29,6 +29,49 @@ const mapEnvironment = (env) => ({
__typename: 'LocalEnvironment',
});
// Normalizes raw Kubernetes list items into the minimal { status, spec }
// shape declared by the k8sWorkloads client-side GraphQL schema, filling in
// safe defaults so missing fields never leak `undefined` into the cache.
//
// @param {Array<Object>} items - raw items from a Kubernetes list response
// @param {string} kind - the list kind (e.g. 'DeploymentList', 'JobList')
// @returns {Array<Object>} items reduced to the fields the UI consumes
const mapWorkloadItems = (items, kind) => {
  return items.map((item) => {
    const updatedItem = {
      status: {},
      spec: {},
    };

    switch (kind) {
      case 'DeploymentList':
        // Only the status conditions list is used for deployments.
        updatedItem.status.conditions = item.status.conditions || [];
        break;
      case 'DaemonSetList':
        updatedItem.status = {
          numberMisscheduled: item.status.numberMisscheduled || 0,
          numberReady: item.status.numberReady || 0,
          desiredNumberScheduled: item.status.desiredNumberScheduled || 0,
        };
        break;
      case 'StatefulSetList':
      case 'ReplicaSetList':
        updatedItem.status.readyReplicas = item.status.readyReplicas || 0;
        updatedItem.spec.replicas = item.spec.replicas || 0;
        break;
      case 'JobList':
        updatedItem.status.failed = item.status.failed || 0;
        updatedItem.status.succeeded = item.status.succeeded || 0;
        updatedItem.spec.completions = item.spec.completions || 0;
        break;
      case 'CronJobList':
        updatedItem.status.active = item.status.active || 0;
        updatedItem.status.lastScheduleTime = item.status.lastScheduleTime || '';
        // `suspend` is a boolean in the CronJob spec (see the
        // `suspend: Boolean` typedef); `?? false` keeps an explicit `false`
        // intact, whereas the previous `|| 0` coerced it to the number 0.
        updatedItem.spec.suspend = item.spec.suspend ?? false;
        break;
      default:
        // Unknown kinds are passed through untouched.
        updatedItem.status = item?.status;
        updatedItem.spec = item?.spec;
        break;
    }

    return updatedItem;
  });
};
export const resolvers = (endpoint) => ({
Query: {
environmentApp(_context, { page, scope, search }, { cache }) {
@ -109,6 +152,60 @@ export const resolvers = (endpoint) => ({
throw error;
});
},
// Client-side resolver: fetches every workload kind (Deployments, DaemonSets,
// StatefulSets, ReplicaSets, Jobs, CronJobs) from the Kubernetes Apps/Batch
// APIs, scoped to `namespace` when one is provided, otherwise cluster-wide.
// Partial failures are tolerated: a kind whose request failed simply keeps
// its empty default list. It only throws when EVERY request was rejected.
k8sWorkloads(_, { configuration, namespace }) {
  const appsV1api = new AppsV1Api(configuration);
  const batchV1api = new BatchV1Api(configuration);

  let promises;
  if (namespace) {
    promises = [
      appsV1api.listAppsV1NamespacedDeployment(namespace),
      appsV1api.listAppsV1NamespacedDaemonSet(namespace),
      appsV1api.listAppsV1NamespacedStatefulSet(namespace),
      appsV1api.listAppsV1NamespacedReplicaSet(namespace),
      batchV1api.listBatchV1NamespacedJob(namespace),
      batchV1api.listBatchV1NamespacedCronJob(namespace),
    ];
  } else {
    promises = [
      appsV1api.listAppsV1DeploymentForAllNamespaces(),
      appsV1api.listAppsV1DaemonSetForAllNamespaces(),
      appsV1api.listAppsV1StatefulSetForAllNamespaces(),
      appsV1api.listAppsV1ReplicaSetForAllNamespaces(),
      batchV1api.listBatchV1JobForAllNamespaces(),
      batchV1api.listBatchV1CronJobForAllNamespaces(),
    ];
  }

  // Default result shape; keys match the `kind` field of the API responses.
  const summaryList = {
    DeploymentList: [],
    DaemonSetList: [],
    StatefulSetList: [],
    ReplicaSetList: [],
    JobList: [],
    CronJobList: [],
  };

  // allSettled (not all) so one failing API call does not discard the rest.
  return Promise.allSettled(promises).then((results) => {
    if (results.every((res) => res.status === 'rejected')) {
      // All calls failed: surface the first error's API message if present.
      const error = results[0].reason;
      const errorMessage = error?.response?.data?.message ?? error;
      throw new Error(errorMessage);
    }
    for (const promiseResult of results) {
      if (promiseResult.status === 'fulfilled' && promiseResult?.value?.data) {
        const { kind, items } = promiseResult.value.data;

        if (items?.length > 0) {
          // Trim items down to the fields the client-side schema declares.
          summaryList[kind] = mapWorkloadItems(items, kind);
        }
      }
    }

    return summaryList;
  });
},
},
Mutation: {
stopEnvironmentREST(_, { environment }, { client }) {

View File

@ -93,6 +93,74 @@ type LocalK8sServices {
spec: k8sServiceSpec
}
type k8sDeploymentStatus {
conditions: JSON
}
type localK8sDeployment {
status: k8sDeploymentStatus
}
# DaemonSet status fields. Field names must match what the resolver produces
# and what k8s_workloads.query.graphql requests (numberMisscheduled /
# numberReady / desiredNumberScheduled) — the previous names looked like the
# result of a bad "number"->"Int" rename.
type k8sDaemonSetStatus {
  numberMisscheduled: Int
  numberReady: Int
  desiredNumberScheduled: Int
}
type localK8sDaemonSet {
status: k8sDaemonSetStatus
}
type k8sSetStatus {
readyReplicas: Int
}
type k8sSetSpec {
replicas: Int
}
type localK8sSet {
status: k8sSetStatus
spec: k8sSetSpec
}
type k8sJobStatus {
failed: Int
succeeded: Int
}
type k8sJobSpec {
completions: Int
}
type localK8sJob {
status: k8sJobStatus
spec: k8sJobSpec
}
type k8sCronJobStatus {
active: Int
lastScheduleTime: String
}
type k8sCronJobSpec {
suspend: Boolean
}
type localK8sCronJob {
status: k8sCronJobStatus
spec: k8sCronJobSpec
}
type LocalK8sWorkloads {
DeploymentList: [localK8sDeployment]
DaemonSetList: [localK8sDaemonSet]
StatefulSetList: [localK8sSet]
ReplicaSetList: [localK8sSet]
JobList: [localK8sJob]
CronJobList: [localK8sCronJob]
}
extend type Query {
environmentApp(page: Int, scope: String): LocalEnvironmentApp
folder(environment: NestedLocalEnvironmentInput): LocalEnvironmentFolder
@ -104,6 +172,7 @@ extend type Query {
isLastDeployment(environment: LocalEnvironmentInput): Boolean
k8sPods(configuration: LocalConfiguration, namespace: String): [LocalK8sPods]
k8sServices(configuration: LocalConfiguration): [LocalK8sServices]
k8sWorkloads(configuration: LocalConfiguration, namespace: String): LocalK8sWorkloads
}
extend type Mutation {

View File

@ -34,3 +34,108 @@ export function getServiceAge(creationTimestamp) {
return ageString;
}
// Groups Deployment items by health, derived from the first two status
// conditions (assumed order: [Available, Progressing] — TODO confirm against
// the Kubernetes API response ordering).
// Returns an object containing only the non-empty groups, e.g.
// { ready: [...] } or { ready: [...], failed: [...] }.
//
// A deployment with missing/short `conditions` (the resolver defaults it to
// an empty array) previously crashed on `available.status`; it is now
// classified as failed, since readiness cannot be confirmed.
export function getDeploymentsStatuses(items) {
  const failed = [];
  const ready = [];

  items.forEach((item) => {
    const [available, progressing] = item.status?.conditions ?? [];

    // eslint-disable-next-line @gitlab/require-i18n-strings
    if (available?.status === 'True') {
      ready.push(item);
      // eslint-disable-next-line @gitlab/require-i18n-strings
    } else if (progressing?.status !== 'True') {
      // Not available and not progressing (or conditions missing) => failed.
      failed.push(item);
    }
  });

  return {
    ...(failed.length && { failed }),
    ...(ready.length && { ready }),
  };
}
// Groups DaemonSet items by health: failed when any pod is misscheduled or
// the ready count lags the desired count; ready when all desired pods are
// ready and none are misscheduled. Only non-empty groups appear in the
// returned object.
export function getDaemonSetStatuses(items) {
  const failed = [];
  const ready = [];

  items.forEach((item) => {
    const misscheduled = item.status?.numberMisscheduled;
    const readyCount = item.status?.numberReady;
    const desiredCount = item.status?.desiredNumberScheduled;

    if (misscheduled > 0 || readyCount !== desiredCount) {
      failed.push(item);
    }
    if (readyCount === desiredCount && !misscheduled) {
      ready.push(item);
    }
  });

  return {
    ...(failed.length && { failed }),
    ...(ready.length && { ready }),
  };
}
// Groups StatefulSet items by health: failed when fewer replicas are ready
// than requested, ready when the counts match exactly. Only non-empty
// groups appear in the returned object.
export function getStatefulSetStatuses(items) {
  const isBehind = ({ status, spec }) => status?.readyReplicas < spec?.replicas;
  const isSynced = ({ status, spec }) => status?.readyReplicas === spec?.replicas;

  const failed = items.filter(isBehind);
  const ready = items.filter(isSynced);

  return {
    ...(failed.length && { failed }),
    ...(ready.length && { ready }),
  };
}
// Groups ReplicaSet items by health using the same replica-count comparison
// as StatefulSets: failed when ready < desired, ready when ready === desired.
// Only non-empty groups appear in the returned object.
export function getReplicaSetStatuses(items) {
  const failed = [];
  const ready = [];

  items.forEach((item) => {
    const readyReplicas = item.status?.readyReplicas;
    const desiredReplicas = item.spec?.replicas;

    if (readyReplicas < desiredReplicas) {
      failed.push(item);
    }
    if (readyReplicas === desiredReplicas) {
      ready.push(item);
    }
  });

  return {
    ...(failed.length && { failed }),
    ...(ready.length && { ready }),
  };
}
// Groups Job items by outcome: failed when any pod failed or the succeeded
// count differs from the requested completions; completed when succeeded
// matches completions. Only non-empty groups appear in the returned object.
//
// Fix: `item.status.failed` now uses optional chaining like every sibling
// helper, so an item without a `status` no longer throws a TypeError.
export function getJobsStatuses(items) {
  const failed = items.filter((item) => {
    return item.status?.failed > 0 || item.status?.succeeded !== item.spec?.completions;
  });
  const completed = items.filter((item) => {
    return item.status?.succeeded === item.spec?.completions;
  });

  return {
    ...(failed.length && { failed }),
    ...(completed.length && { completed }),
  };
}
// Groups CronJob items by state: failed when jobs are active but nothing has
// ever been scheduled; suspended when the spec suspends scheduling; ready
// when a last schedule time exists. Items matching none of these are
// omitted. Only non-empty groups appear in the returned object.
export function getCronJobsStatuses(items) {
  const groups = { failed: [], suspended: [], ready: [] };

  items.forEach((item) => {
    const activeCount = item.status?.active;
    const lastRunAt = item.status?.lastScheduleTime;

    if (activeCount > 0 && !lastRunAt) {
      groups.failed.push(item);
    } else if (item.spec?.suspend) {
      groups.suspended.push(item);
    } else if (lastRunAt) {
      groups.ready.push(item);
    }
  });

  return {
    ...(groups.failed.length && { failed: groups.failed }),
    ...(groups.suspended.length && { suspended: groups.suspended }),
    ...(groups.ready.length && { ready: groups.ready }),
  };
}

View File

@ -451,3 +451,21 @@
color: $gl-text-color;
}
}
// Divider with centered inline content (e.g. the "or" separator between
// OmniAuth provider buttons and the password sign-in form). Draws a 1px
// rule on each side of the content via ::before/::after pseudo-elements;
// the host element is expected to be a flex container so `flex: 1` lets
// each rule fill the remaining width.
@mixin omniauth-divider {
  &::before,
  &::after {
    content: '';
    flex: 1;
    border-bottom: 1px solid var(--gray-100, $gray-100);
    margin: $gl-padding-24 0;
  }

  // Gap between each rule and the centered content.
  &::before {
    margin-right: $gl-padding;
  }

  &::after {
    margin-left: $gl-padding;
  }
}

View File

@ -221,6 +221,10 @@
color: $red-700;
}
}
.omniauth-divider {
@include omniauth-divider;
}
}
@include media-breakpoint-down(xs) {

View File

@ -9,21 +9,7 @@
}
.omniauth-divider {
&::before,
&::after {
content: '';
flex: 1;
border-bottom: 1px solid var(--gray-100, $gray-100);
margin: $gl-padding-24 0;
}
&::before {
margin-right: $gl-padding;
}
&::after {
margin-left: $gl-padding;
}
@include omniauth-divider;
}
.decline-page {

View File

@ -360,11 +360,6 @@ input.btn-block[type="button"] {
align-items: center;
justify-content: space-between;
}
.clearfix::after {
display: block;
clear: both;
content: "";
}
.fixed-top {
position: fixed;
top: 0;
@ -783,11 +778,8 @@ svg {
.gl-display-inline-block {
display: inline-block;
}
.gl-flex-wrap {
flex-wrap: wrap;
}
.gl-justify-content-center {
justify-content: center;
.gl-align-items-center {
align-items: center;
}
.gl-justify-content-space-between {
justify-content: space-between;
@ -801,9 +793,6 @@ svg {
.gl-w-half {
width: 50%;
}
.gl-w-90p {
width: 90%;
}
.gl-w-full {
width: 100%;
}
@ -812,9 +801,6 @@ svg {
width: 100%;
}
}
.gl-p-5 {
padding: 1rem;
}
.gl-px-5 {
padding-left: 1rem;
padding-right: 1rem;
@ -822,6 +808,13 @@ svg {
.gl-pt-5 {
padding-top: 1rem;
}
.gl-pb-5 {
padding-bottom: 1rem;
}
.gl-py-5 {
padding-top: 1rem;
padding-bottom: 1rem;
}
.gl-mt-3 {
margin-top: 0.5rem;
}
@ -831,9 +824,6 @@ svg {
.gl-mr-auto {
margin-right: auto;
}
.gl-mr-2 {
margin-right: 0.25rem;
}
.gl-mb-1 {
margin-bottom: 0.125rem;
}
@ -846,9 +836,6 @@ svg {
.gl-ml-auto {
margin-left: auto;
}
.gl-ml-2 {
margin-left: 0.25rem;
}
@media (min-width: 576px) {
.gl-sm-mt-0 {
margin-top: 0;
@ -860,9 +847,6 @@ svg {
.gl-font-size-h2 {
font-size: 1.1875rem;
}
.gl-font-weight-normal {
font-weight: 400;
}
.gl-font-weight-bold {
font-weight: 600;
}

View File

@ -8,6 +8,9 @@ module Mutations
argument :issues_sort, Types::IssueSortEnum,
required: false,
description: 'Sort order for issue lists.'
argument :visibility_pipeline_id_type, Types::VisibilityPipelineIdTypeEnum,
required: false,
description: 'Determines whether the pipeline list shows ID or IID.'
field :user_preferences,
Types::UserPreferencesType,

View File

@ -175,3 +175,5 @@ module Types
end
end
end
Types::UserInterface.prepend_mod

View File

@ -10,6 +10,10 @@ module Types
description: 'Sort order for issue lists.',
null: true
field :visibility_pipeline_id_type, Types::VisibilityPipelineIdTypeEnum,
description: 'Determines whether the pipeline list shows ID or IID.',
null: true
def issues_sort
object.issues_sort.to_sym
end

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true

module Types
  # GraphQL enum exposing the UserPreference visibility_pipeline_id_type
  # options (whether pipeline lists display the record ID or the IID).
  class VisibilityPipelineIdTypeEnum < BaseEnum
    graphql_name 'VisibilityPipelineIdType'
    description 'Determines whether the pipeline list shows ID or IID'

    # One enum value per key of the model-level enum, e.g. ID / IID.
    UserPreference.visibility_pipeline_id_types.keys.each do |type_key|
      value type_key.upcase, value: type_key, description: "Display pipeline #{type_key.upcase}."
    end
  end
end

View File

@ -9,6 +9,12 @@ class Vulnerability < ApplicationRecord
scope :with_projects, -> { includes(:project) }
# Policy class inferring logic is causing performance
# issues therefore we need to explicitly set it.
def self.declarative_policy_class
:VulnerabilityPolicy
end
def self.link_reference_pattern
nil
end

View File

@ -19,5 +19,4 @@
= _('No authentication methods configured.')
- if omniauth_enabled? && button_based_providers_enabled?
.clearfix
= render 'devise/shared/omniauth_box', render_remember_me: false
= render 'devise/shared/omniauth_box', render_remember_me: false

View File

@ -21,8 +21,8 @@
.gl-px-5
= recaptcha_tags nonce: content_security_policy_nonce
.submit-container.move-submit-down.gl-px-5
.submit-container.move-submit-down.gl-px-5.gl-pb-5
= f.button _('Sign in'), type: :submit, class: "gl-button btn btn-block btn-confirm js-sign-in-button#{' js-no-auto-disable' if Feature.enabled?(:arkose_labs_login_challenge)}", data: { qa_selector: 'sign_in_button', testid: 'sign-in-button' }
- if Gitlab::CurrentSettings.sign_in_text.present? && Feature.enabled?(:restyle_login_page, @project)
.gl-px-5
- if Gitlab::CurrentSettings.sign_in_text.present? && Feature.enabled?(:restyle_login_page, @project)
.gl-px-5
= markdown_field(Gitlab::CurrentSettings.current_application_settings, :sign_in_text)

View File

@ -26,5 +26,4 @@
= _("Don't have an account yet?")
= link_to _("Register now"), new_registration_path(:user, invite_email: @invite_email), data: { qa_selector: 'register_link' }
- if omniauth_enabled? && devise_mapping.omniauthable? && button_based_providers_enabled?
.clearfix
= render 'devise/shared/omniauth_box'
= render 'devise/shared/omniauth_box'

View File

@ -1,17 +1,21 @@
- render_remember_me = remember_me_enabled? && local_assigns.fetch(:render_remember_me, true)
- restyle_login_page_enabled = Feature.enabled?(:restyle_login_page, @project)
%div{ class: restyle_login_page_enabled ? 'omniauth-container gl-mt-5 gl-p-5 gl-text-center gl-w-90p gl-ml-auto gl-mr-auto' : 'omniauth-container gl-mt-5 gl-p-5' }
%label{ class: restyle_login_page_enabled ? 'gl-font-weight-normal' : 'gl-font-weight-bold' }
= _('Sign in with')
- providers = enabled_button_based_providers
.gl-display-flex.gl-flex-wrap{ class: restyle_login_page_enabled ? 'gl-justify-content-center' : 'gl-justify-content-between' }
- providers.each do |provider|
- has_icon = provider_has_icon?(provider)
= button_to omniauth_authorize_path(:user, provider), id: "oauth-login-#{provider}", data: { qa_selector: "#{qa_selector_for_provider(provider)}" }, class: "btn gl-button btn-default gl-ml-2 gl-mr-2 gl-mb-2 js-oauth-login #{'gl-w-full' unless restyle_login_page_enabled}", form: { class: restyle_login_page_enabled ? 'gl-mb-3' : 'gl-w-full gl-mb-3' } do
- if has_icon
= provider_image_tag(provider)
%span.gl-button-text
= label_for_provider(provider)
- if restyle_login_page_enabled && (any_form_based_providers_enabled? || password_authentication_enabled_for_web?)
.omniauth-divider.gl-display-flex.gl-align-items-center
= _("or")
.gl-mt-5.gl-px-5{ class: restyle_login_page_enabled ? 'omniauth-container gl-text-center gl-ml-auto gl-mr-auto' : 'omniauth-container gl-py-5' }
- if !restyle_login_page_enabled
%label.gl-font-weight-bold
= _('Sign in with')
- enabled_button_based_providers.each do |provider|
- has_icon = provider_has_icon?(provider)
= button_to omniauth_authorize_path(:user, provider), id: "oauth-login-#{provider}", data: { qa_selector: "#{qa_selector_for_provider(provider)}" }, class: "btn gl-button btn-default gl-mb-2 js-oauth-login gl-w-full", form: { class: restyle_login_page_enabled ? 'gl-mb-3' : 'gl-w-full gl-mb-3' } do
- if has_icon
= provider_image_tag(provider)
%span.gl-button-text
= label_for_provider(provider)
- if render_remember_me
= render Pajamas::CheckboxTagComponent.new(name: 'remember_me_omniauth', value: nil) do |c|
= c.label do

View File

@ -3,9 +3,9 @@
.gl-text-center.gl-pt-5
%label.gl-font-weight-normal
= _("Register with:")
.gl-text-center.gl-w-90p.gl-ml-auto.gl-mr-auto
.gl-text-center.gl-ml-auto.gl-mr-auto
- providers.each do |provider|
= link_to omniauth_authorize_path(:user, provider, register_omniauth_params), method: :post, class: "btn gl-button btn-default gl-ml-2 gl-mr-2 gl-mb-2 js-oauth-login #{qa_selector_for_provider(provider)}", data: { provider: provider }, id: "oauth-login-#{provider}" do
= link_to omniauth_authorize_path(:user, provider, register_omniauth_params), method: :post, class: "btn gl-button btn-default gl-w-full gl-mb-4 js-oauth-login #{qa_selector_for_provider(provider)}", data: { provider: provider }, id: "oauth-login-#{provider}" do
- if provider_has_icon?(provider)
= provider_image_tag(provider)
%span.gl-button-text
@ -15,7 +15,7 @@
= _("Create an account using:")
.gl-display-flex.gl-justify-content-between.gl-flex-wrap
- providers.each do |provider|
= link_to omniauth_authorize_path(:user, provider, register_omniauth_params), method: :post, class: "btn gl-button btn-default gl-w-full gl-mb-3 js-oauth-login #{qa_selector_for_provider(provider)}", data: { provider: provider }, id: "oauth-login-#{provider}" do
= link_to omniauth_authorize_path(:user, provider, register_omniauth_params), method: :post, class: "btn gl-button btn-default gl-w-full gl-mb-4 js-oauth-login #{qa_selector_for_provider(provider)}", data: { provider: provider }, id: "oauth-login-#{provider}" do
- if provider_has_icon?(provider)
= provider_image_tag(provider)
%span.gl-button-text

View File

@ -1263,6 +1263,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: github_importer:github_import_pull_requests_import_review
:worker_name: Gitlab::GithubImport::PullRequests::ImportReviewWorker
:feature_category: :importers
:has_external_dependencies: true
:urgency: :low
:resource_boundary: :cpu
:weight: 1
:idempotent: false
:tags: []
- :name: github_importer:github_import_pull_requests_import_review_request
:worker_name: Gitlab::GithubImport::PullRequests::ImportReviewRequestWorker
:feature_category: :importers

View File

@ -1,5 +1,7 @@
# frozen_string_literal: true
# TODO: remove in 16.1 milestone
# https://gitlab.com/gitlab-org/gitlab/-/issues/409706
module Gitlab
module GithubImport
class ImportPullRequestReviewWorker # rubocop:disable Scalability/IdempotentWorker
@ -12,7 +14,7 @@ module Gitlab
end
def importer_class
Importer::PullRequestReviewImporter
Importer::PullRequests::ReviewImporter
end
def object_type

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true

module Gitlab
  module GithubImport
    module PullRequests
      # Sidekiq worker that imports a single GitHub pull request review,
      # wired up through the ObjectImporter concern which consumes the three
      # methods defined below.
      class ImportReviewWorker # rubocop:disable Scalability/IdempotentWorker
        include ObjectImporter

        worker_resource_boundary :cpu

        # Representation class used to rehydrate the review payload.
        def representation_class
          Gitlab::GithubImport::Representation::PullRequestReview
        end

        # Importer that performs the actual persistence of the review.
        def importer_class
          Importer::PullRequests::ReviewImporter
        end

        # Object type key identifying this payload kind to the import
        # framework.
        def object_type
          :pull_request_review
        end
      end
    end
  end
end

View File

@ -15,7 +15,7 @@ module Gitlab
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
waiter = Importer::PullRequestsReviewsImporter
waiter = Importer::PullRequests::ReviewsImporter
.new(project, client)
.execute

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/381667
milestone: '15.6'
type: development
group: "group::source code"
default_enabled: false
default_enabled: true

View File

@ -5,4 +5,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/404567
milestone: '15.11'
type: development
group: group::code review
default_enabled: false
default_enabled: true

View File

@ -0,0 +1,10 @@
---
table_name: remote_development_agent_configs
classes:
- RemoteDevelopment::RemoteDevelopmentAgentConfig
feature_categories:
- remote_development
description: Remote Development Cluster Agent Configuration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/105783
milestone: '16.0'
gitlab_schema: gitlab_main

View File

@ -1,6 +1,7 @@
---
table_name: verification_codes
classes: []
classes:
-
feature_categories:
- jihu
description: Used by the JiHu edition for user verification

10
db/docs/workspaces.yml Normal file
View File

@ -0,0 +1,10 @@
---
table_name: workspaces
classes:
- RemoteDevelopment::Workspace
feature_categories:
- remote_development
description: Remote Development Workspaces
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/105783
milestone: '16.0'
gitlab_schema: gitlab_main

View File

@ -0,0 +1,44 @@
# frozen_string_literal: true

# Creates the `workspaces` table backing the Remote Development feature:
# one row per workspace a user runs through a Kubernetes cluster agent.
# Foreign keys for user_id/project_id/cluster_agent_id are added in
# separate, dedicated migrations.
class CreateWorkspacesTable < Gitlab::Database::Migration[2.1]
  def up
    create_table :workspaces do |t|
      t.timestamps_with_timezone null: false
      # NOTE: All workspace foreign key references are currently `on_delete: :cascade`, because we have no support or
      # testing around null values. However, in the future we may want to switch these to nullify, especially
      # once we start introducing logging, metrics, billing, etc. around workspaces.
      t.bigint :user_id, null: false, index: true
      t.bigint :project_id, null: false, index: true
      t.bigint :cluster_agent_id, null: false, index: true
      t.datetime_with_timezone :desired_state_updated_at, null: false
      # Nullable: set once the agent first reports back about this workspace.
      t.datetime_with_timezone :responded_to_agent_at
      t.integer :max_hours_before_termination, limit: 2, null: false
      # Workspace name must be globally unique (unique index).
      t.text :name, limit: 64, null: false, index: { unique: true }
      t.text :namespace, limit: 64, null: false
      t.text :desired_state, limit: 32, null: false
      t.text :actual_state, limit: 32, null: false
      t.text :editor, limit: 256, null: false
      # Git ref and path locating the devfile in the project repository.
      t.text :devfile_ref, limit: 256, null: false
      t.text :devfile_path, limit: 2048, null: false
      # NOTE: The limit on the devfile fields are arbitrary, and only added to avoid a rubocop
      # Migration/AddLimitToTextColumns error. We expect the average devfile side to be small, perhaps ~0.5k for a
      # devfile and ~2k for a processed_devfile, but to account for unexpected usage resulting in larger files,
      # we have specified 65535, which allows for a YAML file with over 800 lines of an average 80-character
      # length.
      t.text :devfile, limit: 65535
      t.text :processed_devfile, limit: 65535
      t.text :url, limit: 1024, null: false
      # NOTE: The resource version is currently backed by etcd's mod_revision.
      # However, it's important to note that the application should not rely on the implementation details of
      # the versioning system maintained by Kubernetes. We may change the implementation of resource version
      # in the future, such as to change it to a timestamp or per-object counter.
      # https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency
      # The limit of 64 is arbitrary.
      t.text :deployment_resource_version, limit: 64
    end
  end

  def down
    drop_table :workspaces
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

# Adds the workspaces.user_id -> users.id foreign key. Done in its own
# migration (with disable_ddl_transaction!) so the concurrent FK helper
# can validate without holding a long transaction.
class CreateWorkspacesUserForeignKey < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  def up
    # NOTE: All workspace foreign key references are currently `on_delete: :cascade`, because we have no support or
    # testing around null values. However, in the future we may want to switch these to nullify, especially
    # once we start introducing logging, metrics, billing, etc. around workspaces.
    add_concurrent_foreign_key :workspaces, :users, column: :user_id, on_delete: :cascade
  end

  def down
    # Dropping an FK is fast; retry under lock contention.
    with_lock_retries do
      remove_foreign_key :workspaces, column: :user_id
    end
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true
class CreateWorkspacesProjectForeignKey < Gitlab::Database::Migration[2.1]
disable_ddl_transaction!
def up
# NOTE: All workspace foreign key references are currently `on_delete: :cascade`, because we have no support or
# testing around null values. However, in the future we may want to switch these to nullify, especially
# once we start introducing logging, metrics, billing, etc. around workspaces.
add_concurrent_foreign_key :workspaces, :projects, column: :project_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :workspaces, column: :project_id
end
end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

# Adds the workspaces.cluster_agent_id -> cluster_agents.id foreign key.
# Done in its own migration (with disable_ddl_transaction!) so the
# concurrent FK helper can validate without holding a long transaction.
class CreateWorkspacesClusterAgentForeignKey < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  def up
    # NOTE: All workspace foreign key references are currently `on_delete: :cascade`, because we have no support or
    # testing around null values. However, in the future we may want to switch these to nullify, especially
    # once we start introducing logging, metrics, billing, etc. around workspaces.
    add_concurrent_foreign_key :workspaces, :cluster_agents, column: :cluster_agent_id, on_delete: :cascade
  end

  def down
    # Dropping an FK is fast; retry under lock contention.
    with_lock_retries do
      remove_foreign_key :workspaces, column: :cluster_agent_id
    end
  end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true

# Creates the `remote_development_agent_configs` table, which stores the
# Remote Development settings (enabled flag and DNS zone) for a cluster
# agent. The cluster_agent_id foreign key is added in a separate
# migration.
class CreateRemoteDevelopmentAgentConfigsTable < Gitlab::Database::Migration[2.1]
  def up
    create_table :remote_development_agent_configs do |t|
      t.timestamps_with_timezone null: false
      t.bigint :cluster_agent_id, null: false, index: true
      t.boolean :enabled, null: false
      # Limit required to satisfy Migration/AddLimitToTextColumns.
      t.text :dns_zone, null: false, limit: 256
    end
  end

  def down
    drop_table :remote_development_agent_configs
  end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true

# Adds the remote_development_agent_configs.cluster_agent_id ->
# cluster_agents.id foreign key. Done in its own migration (with
# disable_ddl_transaction!) so the concurrent FK helper can validate
# without holding a long transaction. Cascade delete: an agent's config
# has no meaning without the agent.
class CreateRemoteDevelopmentAgentConfigAgentForeignKey < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  def up
    add_concurrent_foreign_key :remote_development_agent_configs,
      :cluster_agents, column: :cluster_agent_id, on_delete: :cascade
  end

  def down
    # Dropping an FK is fast; retry under lock contention.
    with_lock_retries do
      remove_foreign_key :remote_development_agent_configs, column: :cluster_agent_id
    end
  end
end

View File

@ -0,0 +1 @@
94810a223f2d37a673d690ba326577068c18d6353021a78a8f820cf8a95c756c

View File

@ -0,0 +1 @@
74a3b48267b16dcd9d3374b01604a0ae7f55dd35e681e3bf6bf5386ea4f6bdc3

View File

@ -0,0 +1 @@
bfa7df29a9f021b67db23127c6382161b131b77738f7a29dac5b64bc7431fd88

View File

@ -0,0 +1 @@
b2b2a169bb1d8581eec2706d03314d0675dcdf05b23b2787292b18ac1dfe7847

View File

@ -0,0 +1 @@
241ed02cdd479f06a5a4a817b2d27bfa970997167fbd67ddae1da8359830a2ea

View File

@ -0,0 +1 @@
08e0fd85bca9eff63f0fc5d1e34cca628ee191decddebcb90aaf98ce18f97147

View File

@ -11279,8 +11279,8 @@ CREATE TABLE appearances (
email_header_and_footer_enabled boolean DEFAULT false NOT NULL,
profile_image_guidelines text,
profile_image_guidelines_html text,
pwa_short_name text,
pwa_icon text,
pwa_short_name text,
pwa_name text,
pwa_description text,
CONSTRAINT appearances_profile_image_guidelines CHECK ((char_length(profile_image_guidelines) <= 4096)),
@ -11694,10 +11694,6 @@ CREATE TABLE application_settings (
database_grafana_api_url text,
database_grafana_tag text,
public_runner_releases_url text DEFAULT 'https://gitlab.com/api/v4/projects/gitlab-org%2Fgitlab-runner/releases'::text NOT NULL,
password_uppercase_required boolean DEFAULT false NOT NULL,
password_lowercase_required boolean DEFAULT false NOT NULL,
password_number_required boolean DEFAULT false NOT NULL,
password_symbol_required boolean DEFAULT false NOT NULL,
encrypted_arkose_labs_public_api_key bytea,
encrypted_arkose_labs_public_api_key_iv bytea,
encrypted_arkose_labs_private_api_key bytea,
@ -11708,14 +11704,14 @@ CREATE TABLE application_settings (
inactive_projects_min_size_mb integer DEFAULT 0 NOT NULL,
inactive_projects_send_warning_email_after_months integer DEFAULT 1 NOT NULL,
delayed_group_deletion boolean DEFAULT true NOT NULL,
maven_package_requests_forwarding boolean DEFAULT true NOT NULL,
arkose_labs_namespace text DEFAULT 'client'::text NOT NULL,
max_export_size integer DEFAULT 0,
encrypted_slack_app_signing_secret bytea,
encrypted_slack_app_signing_secret_iv bytea,
container_registry_pre_import_timeout integer DEFAULT 1800 NOT NULL,
container_registry_import_timeout integer DEFAULT 600 NOT NULL,
pipeline_limit_per_project_user_sha integer DEFAULT 0 NOT NULL,
encrypted_slack_app_signing_secret bytea,
encrypted_slack_app_signing_secret_iv bytea,
globally_allowed_ips text DEFAULT ''::text NOT NULL,
dingtalk_integration_enabled boolean DEFAULT false NOT NULL,
encrypted_dingtalk_corpid bytea,
encrypted_dingtalk_corpid_iv bytea,
@ -11723,8 +11719,11 @@ CREATE TABLE application_settings (
encrypted_dingtalk_app_key_iv bytea,
encrypted_dingtalk_app_secret bytea,
encrypted_dingtalk_app_secret_iv bytea,
password_uppercase_required boolean DEFAULT false NOT NULL,
password_lowercase_required boolean DEFAULT false NOT NULL,
password_number_required boolean DEFAULT false NOT NULL,
password_symbol_required boolean DEFAULT false NOT NULL,
jira_connect_application_key text,
globally_allowed_ips text DEFAULT ''::text NOT NULL,
container_registry_pre_import_tags_rate numeric(6,2) DEFAULT 0.5 NOT NULL,
license_usage_data_exported boolean DEFAULT false NOT NULL,
phone_verification_code_enabled boolean DEFAULT false NOT NULL,
@ -11739,33 +11738,34 @@ CREATE TABLE application_settings (
error_tracking_api_url text,
git_rate_limit_users_allowlist text[] DEFAULT '{}'::text[] NOT NULL,
error_tracking_access_token_encrypted text,
invitation_flow_enforcement boolean DEFAULT false NOT NULL,
package_registry_cleanup_policies_worker_capacity integer DEFAULT 2 NOT NULL,
deactivate_dormant_users_period integer DEFAULT 90 NOT NULL,
auto_ban_user_on_excessive_projects_download boolean DEFAULT false NOT NULL,
invitation_flow_enforcement boolean DEFAULT false NOT NULL,
max_pages_custom_domains_per_project integer DEFAULT 0 NOT NULL,
cube_api_base_url text,
encrypted_cube_api_key bytea,
encrypted_cube_api_key_iv bytea,
jitsu_host text,
jitsu_project_xid text,
jitsu_administrator_email text,
encrypted_jitsu_administrator_password bytea,
encrypted_jitsu_administrator_password_iv bytea,
maven_package_requests_forwarding boolean DEFAULT true NOT NULL,
dashboard_limit_enabled boolean DEFAULT false NOT NULL,
dashboard_limit integer DEFAULT 0 NOT NULL,
dashboard_notification_limit integer DEFAULT 0 NOT NULL,
dashboard_enforcement_limit integer DEFAULT 0 NOT NULL,
dashboard_limit_new_namespace_creation_enforcement_date date,
jitsu_host text,
jitsu_project_xid text,
jitsu_administrator_email text,
encrypted_jitsu_administrator_password bytea,
encrypted_jitsu_administrator_password_iv bytea,
can_create_group boolean DEFAULT true NOT NULL,
lock_maven_package_requests_forwarding boolean DEFAULT false NOT NULL,
lock_pypi_package_requests_forwarding boolean DEFAULT false NOT NULL,
lock_npm_package_requests_forwarding boolean DEFAULT false NOT NULL,
jira_connect_proxy_url text,
password_expiration_enabled boolean DEFAULT false NOT NULL,
password_expires_in_days integer DEFAULT 90 NOT NULL,
password_expires_notice_before_days integer DEFAULT 7 NOT NULL,
product_analytics_enabled boolean DEFAULT false NOT NULL,
jira_connect_proxy_url text,
email_confirmation_setting smallint DEFAULT 0,
disable_admin_oauth_scopes boolean DEFAULT false NOT NULL,
default_preferred_language text DEFAULT 'en'::text NOT NULL,
@ -11774,37 +11774,37 @@ CREATE TABLE application_settings (
encrypted_telesign_customer_xid_iv bytea,
encrypted_telesign_api_key bytea,
encrypted_telesign_api_key_iv bytea,
disable_personal_access_tokens boolean DEFAULT false NOT NULL,
max_terraform_state_size_bytes integer DEFAULT 0 NOT NULL,
disable_personal_access_tokens boolean DEFAULT false NOT NULL,
bulk_import_enabled boolean DEFAULT false NOT NULL,
allow_runner_registration_token boolean DEFAULT true NOT NULL,
user_defaults_to_private_profile boolean DEFAULT false NOT NULL,
allow_possible_spam boolean DEFAULT false NOT NULL,
default_syntax_highlighting_theme integer DEFAULT 1 NOT NULL,
allow_runner_registration_token boolean DEFAULT true NOT NULL,
encrypted_product_analytics_clickhouse_connection_string bytea,
encrypted_product_analytics_clickhouse_connection_string_iv bytea,
allow_possible_spam boolean DEFAULT false NOT NULL,
search_max_shard_size_gb integer DEFAULT 50 NOT NULL,
search_max_docs_denominator integer DEFAULT 5000000 NOT NULL,
search_min_docs_before_rollover integer DEFAULT 100000 NOT NULL,
deactivation_email_additional_text text,
jira_connect_public_key_storage_enabled boolean DEFAULT false NOT NULL,
git_rate_limit_users_alertlist integer[] DEFAULT '{}'::integer[] NOT NULL,
allow_deploy_tokens_and_keys_with_external_authn boolean DEFAULT false NOT NULL,
jira_connect_public_key_storage_enabled boolean DEFAULT false NOT NULL,
security_policy_global_group_approvers_enabled boolean DEFAULT true NOT NULL,
default_syntax_highlighting_theme integer DEFAULT 1 NOT NULL,
allow_deploy_tokens_and_keys_with_external_authn boolean DEFAULT false NOT NULL,
projects_api_rate_limit_unauthenticated integer DEFAULT 400 NOT NULL,
deny_all_requests_except_allowed boolean DEFAULT false NOT NULL,
product_analytics_data_collector_host text,
lock_memberships_to_saml boolean DEFAULT false NOT NULL,
gitlab_dedicated_instance boolean DEFAULT false NOT NULL,
update_runner_versions_enabled boolean DEFAULT true NOT NULL,
gitlab_dedicated_instance boolean DEFAULT false NOT NULL,
database_apdex_settings jsonb,
encrypted_openai_api_key bytea,
encrypted_openai_api_key_iv bytea,
database_max_running_batched_background_migrations integer DEFAULT 2 NOT NULL,
encrypted_product_analytics_configurator_connection_string bytea,
encrypted_product_analytics_configurator_connection_string_iv bytea,
silent_mode_enabled boolean DEFAULT false NOT NULL,
package_metadata_purl_types smallint[] DEFAULT '{}'::smallint[],
encrypted_product_analytics_configurator_connection_string bytea,
encrypted_product_analytics_configurator_connection_string_iv bytea,
ci_max_includes integer DEFAULT 150 NOT NULL,
encrypted_tofa_credentials bytea,
encrypted_tofa_credentials_iv bytea,
@ -18724,13 +18724,13 @@ CREATE TABLE namespace_settings (
runner_token_expiration_interval integer,
subgroup_runner_token_expiration_interval integer,
project_runner_token_expiration_interval integer,
show_diff_preview_in_email boolean DEFAULT true NOT NULL,
enabled_git_access_protocol smallint DEFAULT 0 NOT NULL,
unique_project_download_limit smallint DEFAULT 0 NOT NULL,
unique_project_download_limit_interval_in_seconds integer DEFAULT 0 NOT NULL,
project_import_level smallint DEFAULT 50 NOT NULL,
unique_project_download_limit_allowlist text[] DEFAULT '{}'::text[] NOT NULL,
auto_ban_user_on_excessive_projects_download boolean DEFAULT false NOT NULL,
show_diff_preview_in_email boolean DEFAULT true NOT NULL,
only_allow_merge_if_pipeline_succeeds boolean DEFAULT false NOT NULL,
allow_merge_on_skipped_pipeline boolean DEFAULT false NOT NULL,
only_allow_merge_if_all_discussions_are_resolved boolean DEFAULT false NOT NULL,
@ -20117,6 +20117,7 @@ CREATE TABLE plan_limits (
helm_max_file_size bigint DEFAULT 5242880 NOT NULL,
ci_registered_group_runners integer DEFAULT 1000 NOT NULL,
ci_registered_project_runners integer DEFAULT 1000 NOT NULL,
web_hook_calls integer DEFAULT 0 NOT NULL,
ci_daily_pipeline_schedule_triggers integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_running_container_scanning integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_cluster_image_scanning integer DEFAULT 0 NOT NULL,
@ -20141,7 +20142,6 @@ CREATE TABLE plan_limits (
enforcement_limit integer DEFAULT 0 NOT NULL,
notification_limit integer DEFAULT 0 NOT NULL,
dashboard_limit_enabled_at timestamp with time zone,
web_hook_calls integer DEFAULT 0 NOT NULL,
project_access_token_limit integer DEFAULT 0 NOT NULL
);
@ -21186,11 +21186,11 @@ CREATE TABLE project_settings (
target_platforms character varying[] DEFAULT '{}'::character varying[] NOT NULL,
enforce_auth_checks_on_uploads boolean DEFAULT true NOT NULL,
selective_code_owner_removals boolean DEFAULT false NOT NULL,
issue_branch_template text,
show_diff_preview_in_email boolean DEFAULT true NOT NULL,
jitsu_key text,
suggested_reviewers_enabled boolean DEFAULT false NOT NULL,
jitsu_key text,
only_allow_merge_if_all_status_checks_passed boolean DEFAULT false NOT NULL,
issue_branch_template text,
mirror_branch_regex text,
allow_pipeline_trigger_approve_deployment boolean DEFAULT false NOT NULL,
emails_enabled boolean DEFAULT true NOT NULL,
@ -21821,6 +21821,25 @@ CREATE SEQUENCE releases_id_seq
ALTER SEQUENCE releases_id_seq OWNED BY releases.id;
CREATE TABLE remote_development_agent_configs (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
cluster_agent_id bigint NOT NULL,
enabled boolean NOT NULL,
dns_zone text NOT NULL,
CONSTRAINT check_9f5cd54d1c CHECK ((char_length(dns_zone) <= 256))
);
CREATE SEQUENCE remote_development_agent_configs_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE remote_development_agent_configs_id_seq OWNED BY remote_development_agent_configs.id;
CREATE TABLE remote_mirrors (
id integer NOT NULL,
project_id integer,
@ -24598,6 +24617,49 @@ CREATE SEQUENCE work_item_widget_definitions_id_seq
ALTER SEQUENCE work_item_widget_definitions_id_seq OWNED BY work_item_widget_definitions.id;
CREATE TABLE workspaces (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
user_id bigint NOT NULL,
project_id bigint NOT NULL,
cluster_agent_id bigint NOT NULL,
desired_state_updated_at timestamp with time zone NOT NULL,
responded_to_agent_at timestamp with time zone,
max_hours_before_termination smallint NOT NULL,
name text NOT NULL,
namespace text NOT NULL,
desired_state text NOT NULL,
actual_state text NOT NULL,
editor text NOT NULL,
devfile_ref text NOT NULL,
devfile_path text NOT NULL,
devfile text,
processed_devfile text,
url text NOT NULL,
deployment_resource_version text,
CONSTRAINT check_15543fb0fa CHECK ((char_length(name) <= 64)),
CONSTRAINT check_157d5f955c CHECK ((char_length(namespace) <= 64)),
CONSTRAINT check_2b401b0034 CHECK ((char_length(deployment_resource_version) <= 64)),
CONSTRAINT check_77d1a2ff50 CHECK ((char_length(processed_devfile) <= 65535)),
CONSTRAINT check_8e363ee3ad CHECK ((char_length(devfile_ref) <= 256)),
CONSTRAINT check_8e4db5ffc2 CHECK ((char_length(actual_state) <= 32)),
CONSTRAINT check_9e42558c35 CHECK ((char_length(url) <= 1024)),
CONSTRAINT check_b70eddcbc1 CHECK ((char_length(desired_state) <= 32)),
CONSTRAINT check_d7ed376e49 CHECK ((char_length(editor) <= 256)),
CONSTRAINT check_dc58d56169 CHECK ((char_length(devfile_path) <= 2048)),
CONSTRAINT check_eb32879a3d CHECK ((char_length(devfile) <= 65535))
);
CREATE SEQUENCE workspaces_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE workspaces_id_seq OWNED BY workspaces.id;
CREATE TABLE x509_certificates (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -25597,6 +25659,8 @@ ALTER TABLE ONLY release_links ALTER COLUMN id SET DEFAULT nextval('release_link
ALTER TABLE ONLY releases ALTER COLUMN id SET DEFAULT nextval('releases_id_seq'::regclass);
ALTER TABLE ONLY remote_development_agent_configs ALTER COLUMN id SET DEFAULT nextval('remote_development_agent_configs_id_seq'::regclass);
ALTER TABLE ONLY remote_mirrors ALTER COLUMN id SET DEFAULT nextval('remote_mirrors_id_seq'::regclass);
ALTER TABLE ONLY required_code_owners_sections ALTER COLUMN id SET DEFAULT nextval('required_code_owners_sections_id_seq'::regclass);
@ -25837,6 +25901,8 @@ ALTER TABLE ONLY work_item_types ALTER COLUMN id SET DEFAULT nextval('work_item_
ALTER TABLE ONLY work_item_widget_definitions ALTER COLUMN id SET DEFAULT nextval('work_item_widget_definitions_id_seq'::regclass);
ALTER TABLE ONLY workspaces ALTER COLUMN id SET DEFAULT nextval('workspaces_id_seq'::regclass);
ALTER TABLE ONLY x509_certificates ALTER COLUMN id SET DEFAULT nextval('x509_certificates_id_seq'::regclass);
ALTER TABLE ONLY x509_commit_signatures ALTER COLUMN id SET DEFAULT nextval('x509_commit_signatures_id_seq'::regclass);
@ -27972,6 +28038,9 @@ ALTER TABLE releases
ALTER TABLE ONLY releases
ADD CONSTRAINT releases_pkey PRIMARY KEY (id);
ALTER TABLE ONLY remote_development_agent_configs
ADD CONSTRAINT remote_development_agent_configs_pkey PRIMARY KEY (id);
ALTER TABLE ONLY remote_mirrors
ADD CONSTRAINT remote_mirrors_pkey PRIMARY KEY (id);
@ -28383,6 +28452,9 @@ ALTER TABLE ONLY work_item_types
ALTER TABLE ONLY work_item_widget_definitions
ADD CONSTRAINT work_item_widget_definitions_pkey PRIMARY KEY (id);
ALTER TABLE ONLY workspaces
ADD CONSTRAINT workspaces_pkey PRIMARY KEY (id);
ALTER TABLE ONLY x509_certificates
ADD CONSTRAINT x509_certificates_pkey PRIMARY KEY (id);
@ -31354,6 +31426,8 @@ CREATE UNIQUE INDEX index_merge_request_reviewers_on_merge_request_id_and_user_i
CREATE INDEX index_merge_request_reviewers_on_user_id ON merge_request_reviewers USING btree (user_id);
CREATE UNIQUE INDEX index_merge_request_user_mentions_note_id_convert_to_bigint ON merge_request_user_mentions USING btree (note_id_convert_to_bigint) WHERE (note_id_convert_to_bigint IS NOT NULL);
CREATE UNIQUE INDEX index_merge_request_user_mentions_on_note_id ON merge_request_user_mentions USING btree (note_id) WHERE (note_id IS NOT NULL);
CREATE INDEX index_merge_requests_closing_issues_on_issue_id ON merge_requests_closing_issues USING btree (issue_id);
@ -32224,6 +32298,8 @@ CREATE UNIQUE INDEX index_releases_on_project_tag_unique ON releases USING btree
CREATE INDEX index_releases_on_released_at ON releases USING btree (released_at);
CREATE INDEX index_remote_development_agent_configs_on_cluster_agent_id ON remote_development_agent_configs USING btree (cluster_agent_id);
CREATE INDEX index_remote_mirrors_on_last_successful_update_at ON remote_mirrors USING btree (last_successful_update_at);
CREATE INDEX index_remote_mirrors_on_project_id ON remote_mirrors USING btree (project_id);
@ -33032,6 +33108,14 @@ CREATE UNIQUE INDEX index_work_item_widget_definitions_on_namespace_type_and_nam
CREATE INDEX index_work_item_widget_definitions_on_work_item_type_id ON work_item_widget_definitions USING btree (work_item_type_id);
CREATE INDEX index_workspaces_on_cluster_agent_id ON workspaces USING btree (cluster_agent_id);
CREATE UNIQUE INDEX index_workspaces_on_name ON workspaces USING btree (name);
CREATE INDEX index_workspaces_on_project_id ON workspaces USING btree (project_id);
CREATE INDEX index_workspaces_on_user_id ON workspaces USING btree (user_id);
CREATE INDEX index_x509_certificates_on_subject_key_identifier ON x509_certificates USING btree (subject_key_identifier);
CREATE INDEX index_x509_certificates_on_x509_issuer_id ON x509_certificates USING btree (x509_issuer_id);
@ -34718,6 +34802,9 @@ ALTER TABLE ONLY user_interacted_projects
ALTER TABLE ONLY merge_request_assignment_events
ADD CONSTRAINT fk_08f7602bfd FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY remote_development_agent_configs
ADD CONSTRAINT fk_0a3c0ada56 FOREIGN KEY (cluster_agent_id) REFERENCES cluster_agents(id) ON DELETE CASCADE;
ALTER TABLE ONLY dast_sites
ADD CONSTRAINT fk_0a57f2271b FOREIGN KEY (dast_site_validation_id) REFERENCES dast_site_validations(id) ON DELETE SET NULL;
@ -35405,6 +35492,9 @@ ALTER TABLE ONLY resource_link_events
ALTER TABLE ONLY metrics_users_starred_dashboards
ADD CONSTRAINT fk_bd6ae32fac FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY workspaces
ADD CONSTRAINT fk_bdb0b31131 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY project_compliance_framework_settings
ADD CONSTRAINT fk_be413374a9 FOREIGN KEY (framework_id) REFERENCES compliance_management_frameworks(id) ON DELETE CASCADE;
@ -35549,6 +35639,9 @@ ALTER TABLE ONLY web_hooks
ALTER TABLE ONLY security_scans
ADD CONSTRAINT fk_dbc89265b9 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY workspaces
ADD CONSTRAINT fk_dc7c316be1 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY epics
ADD CONSTRAINT fk_dccd3f98fc FOREIGN KEY (assignee_id) REFERENCES users(id) ON DELETE SET NULL;
@ -35654,6 +35747,9 @@ ALTER TABLE ONLY user_project_callouts
ALTER TABLE ONLY approval_merge_request_rules
ADD CONSTRAINT fk_f726c79756 FOREIGN KEY (scan_result_policy_id) REFERENCES scan_result_policies(id) ON DELETE CASCADE;
ALTER TABLE ONLY workspaces
ADD CONSTRAINT fk_f78aeddc77 FOREIGN KEY (cluster_agent_id) REFERENCES cluster_agents(id) ON DELETE CASCADE;
ALTER TABLE ONLY cluster_agents
ADD CONSTRAINT fk_f7d43dee13 FOREIGN KEY (created_by_user_id) REFERENCES users(id) ON DELETE SET NULL;
@ -35684,6 +35780,9 @@ ALTER TABLE ONLY issues
ALTER TABLE ONLY geo_event_log
ADD CONSTRAINT fk_geo_event_log_on_geo_event_id FOREIGN KEY (geo_event_id) REFERENCES geo_events(id) ON DELETE CASCADE;
ALTER TABLE ONLY merge_request_user_mentions
ADD CONSTRAINT fk_merge_request_user_mentions_note_id_convert_to_bigint FOREIGN KEY (note_id_convert_to_bigint) REFERENCES notes(id) ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY ml_candidate_metrics
ADD CONSTRAINT fk_ml_candidate_metrics_on_candidate_id FOREIGN KEY (candidate_id) REFERENCES ml_candidates(id) ON DELETE CASCADE;

View File

@ -770,6 +770,42 @@ Returns [`WorkItem`](#workitem).
| ---- | ---- | ----------- |
| <a id="queryworkitemid"></a>`id` | [`WorkItemID!`](#workitemid) | Global ID of the work item. |
### `Query.workspace`
Find a workspace.
WARNING:
**Introduced** in 16.0.
This feature is an Experiment. It can be changed or removed at any time.
Returns [`Workspace`](#workspace).
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryworkspaceid"></a>`id` | [`RemoteDevelopmentWorkspaceID!`](#remotedevelopmentworkspaceid) | Find a workspace by its ID. |
### `Query.workspaces`
Find workspaces owned by the current user by their IDs.
WARNING:
**Introduced** in 16.0.
This feature is an Experiment. It can be changed or removed at any time.
Returns [`WorkspaceConnection`](#workspaceconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="queryworkspacesids"></a>`ids` | [`[RemoteDevelopmentWorkspaceID!]`](#remotedevelopmentworkspaceid) | Array of global workspace IDs. For example, `["gid://gitlab/RemoteDevelopment::Workspace/1"]`. |
## `Mutation` type
The `Mutation` type contains all the mutations you can execute.
@ -6417,6 +6453,7 @@ Input type: `UserPreferencesUpdateInput`
| ---- | ---- | ----------- |
| <a id="mutationuserpreferencesupdateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationuserpreferencesupdateissuessort"></a>`issuesSort` | [`IssueSort`](#issuesort) | Sort order for issue lists. |
| <a id="mutationuserpreferencesupdatevisibilitypipelineidtype"></a>`visibilityPipelineIdType` | [`VisibilityPipelineIdType`](#visibilitypipelineidtype) | Determines whether the pipeline list shows ID or IID. |
#### Fields
@ -6835,6 +6872,59 @@ Input type: `WorkItemUpdateTaskInput`
| <a id="mutationworkitemupdatetasktask"></a>`task` | [`WorkItem`](#workitem) | Updated task. |
| <a id="mutationworkitemupdatetaskworkitem"></a>`workItem` | [`WorkItem`](#workitem) | Updated work item. |
### `Mutation.workspaceCreate`
WARNING:
**Introduced** in 16.0.
This feature is an Experiment. It can be changed or removed at any time.
Input type: `WorkspaceCreateInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationworkspacecreateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkspacecreateclusteragentid"></a>`clusterAgentId` | [`ClustersAgentID!`](#clustersagentid) | ID of the cluster agent the created workspace will be associated with. |
| <a id="mutationworkspacecreatedesiredstate"></a>`desiredState` | [`String!`](#string) | Desired state of the created workspace. |
| <a id="mutationworkspacecreatedevfilepath"></a>`devfilePath` | [`String!`](#string) | Project repo git path containing the devfile used to configure the workspace. |
| <a id="mutationworkspacecreatedevfileref"></a>`devfileRef` | [`String!`](#string) | Project repo git ref containing the devfile used to configure the workspace. |
| <a id="mutationworkspacecreateeditor"></a>`editor` | [`String!`](#string) | Editor to inject into the created workspace. Must match a configured template. |
| <a id="mutationworkspacecreatemaxhoursbeforetermination"></a>`maxHoursBeforeTermination` | [`Int!`](#int) | Maximum hours the workspace can exist before it is automatically terminated. |
| <a id="mutationworkspacecreateprojectid"></a>`projectId` | [`ProjectID!`](#projectid) | ID of the project that will provide the Devfile for the created workspace. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationworkspacecreateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkspacecreateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationworkspacecreateworkspace"></a>`workspace` | [`Workspace`](#workspace) | Created workspace. |
### `Mutation.workspaceUpdate`
WARNING:
**Introduced** in 16.0.
This feature is an Experiment. It can be changed or removed at any time.
Input type: `WorkspaceUpdateInput`
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationworkspaceupdateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkspaceupdatedesiredstate"></a>`desiredState` | [`String!`](#string) | Desired state of the workspace. |
| <a id="mutationworkspaceupdateid"></a>`id` | [`RemoteDevelopmentWorkspaceID!`](#remotedevelopmentworkspaceid) | Global ID of the workspace. |
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationworkspaceupdateclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationworkspaceupdateerrors"></a>`errors` | [`[String!]!`](#string) | Errors encountered during execution of the mutation. |
| <a id="mutationworkspaceupdateworkspace"></a>`workspace` | [`Workspace`](#workspace) | Updated workspace. |
## Connections
Some types in our schema are `Connection` types - they represent a paginated
@ -11132,6 +11222,29 @@ The edge type for [`WorkItemType`](#workitemtype).
| <a id="workitemtypeedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="workitemtypeedgenode"></a>`node` | [`WorkItemType`](#workitemtype) | The item at the end of the edge. |
#### `WorkspaceConnection`
The connection type for [`Workspace`](#workspace).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="workspaceconnectionedges"></a>`edges` | [`[WorkspaceEdge]`](#workspaceedge) | A list of edges. |
| <a id="workspaceconnectionnodes"></a>`nodes` | [`[Workspace]`](#workspace) | A list of nodes. |
| <a id="workspaceconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
#### `WorkspaceEdge`
The edge type for [`Workspace`](#workspace).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="workspaceedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="workspaceedgenode"></a>`node` | [`Workspace`](#workspace) | The item at the end of the edge. |
## Object types
Object types represent the resources that the GitLab GraphQL API can return.
@ -16765,6 +16878,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="mergerequestassigneetodosstate"></a>`state` | [`[TodoStateEnum!]`](#todostateenum) | State of the todo. |
| <a id="mergerequestassigneetodostype"></a>`type` | [`[TodoTargetEnum!]`](#todotargetenum) | Type of the todo. |
##### `MergeRequestAssignee.workspaces`
Workspaces owned by the current user.
Returns [`WorkspaceConnection`](#workspaceconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestassigneeworkspacesids"></a>`ids` | [`[RemoteDevelopmentWorkspaceID!]`](#remotedevelopmentworkspaceid) | Array of global workspace IDs. For example, `["gid://gitlab/RemoteDevelopment::Workspace/1"]`. |
### `MergeRequestAuthor`
The author of the merge request.
@ -17015,6 +17144,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="mergerequestauthortodosstate"></a>`state` | [`[TodoStateEnum!]`](#todostateenum) | State of the todo. |
| <a id="mergerequestauthortodostype"></a>`type` | [`[TodoTargetEnum!]`](#todotargetenum) | Type of the todo. |
##### `MergeRequestAuthor.workspaces`
Workspaces owned by the current user.
Returns [`WorkspaceConnection`](#workspaceconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestauthorworkspacesids"></a>`ids` | [`[RemoteDevelopmentWorkspaceID!]`](#remotedevelopmentworkspaceid) | Array of global workspace IDs. For example, `["gid://gitlab/RemoteDevelopment::Workspace/1"]`. |
### `MergeRequestDiffRegistry`
Represents the Geo sync and verification state of a Merge Request diff.
@ -17284,6 +17429,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="mergerequestparticipanttodosstate"></a>`state` | [`[TodoStateEnum!]`](#todostateenum) | State of the todo. |
| <a id="mergerequestparticipanttodostype"></a>`type` | [`[TodoTargetEnum!]`](#todotargetenum) | Type of the todo. |
##### `MergeRequestParticipant.workspaces`
Workspaces owned by the current user.
Returns [`WorkspaceConnection`](#workspaceconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestparticipantworkspacesids"></a>`ids` | [`[RemoteDevelopmentWorkspaceID!]`](#remotedevelopmentworkspaceid) | Array of global workspace IDs. For example, `["gid://gitlab/RemoteDevelopment::Workspace/1"]`. |
### `MergeRequestPermissions`
Check permissions for the current user on a merge request.
@ -17553,6 +17714,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="mergerequestreviewertodosstate"></a>`state` | [`[TodoStateEnum!]`](#todostateenum) | State of the todo. |
| <a id="mergerequestreviewertodostype"></a>`type` | [`[TodoTargetEnum!]`](#todotargetenum) | Type of the todo. |
##### `MergeRequestReviewer.workspaces`
Workspaces owned by the current user.
Returns [`WorkspaceConnection`](#workspaceconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mergerequestreviewerworkspacesids"></a>`ids` | [`[RemoteDevelopmentWorkspaceID!]`](#remotedevelopmentworkspaceid) | Array of global workspace IDs. For example, `["gid://gitlab/RemoteDevelopment::Workspace/1"]`. |
### `Metadata`
#### Fields
@ -21967,6 +22144,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="usercoretodosstate"></a>`state` | [`[TodoStateEnum!]`](#todostateenum) | State of the todo. |
| <a id="usercoretodostype"></a>`type` | [`[TodoTargetEnum!]`](#todotargetenum) | Type of the todo. |
##### `UserCore.workspaces`
Workspaces owned by the current user.
Returns [`WorkspaceConnection`](#workspaceconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="usercoreworkspacesids"></a>`ids` | [`[RemoteDevelopmentWorkspaceID!]`](#remotedevelopmentworkspaceid) | Array of global workspace IDs. For example, `["gid://gitlab/RemoteDevelopment::Workspace/1"]`. |
### `UserMergeRequestInteraction`
Information about a merge request given a specific user.
@ -22000,6 +22193,7 @@ fields relate to interactions between the two entities.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="userpreferencesissuessort"></a>`issuesSort` | [`IssueSort`](#issuesort) | Sort order for issue lists. |
| <a id="userpreferencesvisibilitypipelineidtype"></a>`visibilityPipelineIdType` | [`VisibilityPipelineIdType`](#visibilitypipelineidtype) | Determines whether the pipeline list shows ID or IID. |
### `UserStatus`
@ -22951,6 +23145,35 @@ Represents a weight widget.
| <a id="workitemwidgetweighttype"></a>`type` | [`WorkItemWidgetType`](#workitemwidgettype) | Widget type. |
| <a id="workitemwidgetweightweight"></a>`weight` | [`Int`](#int) | Weight of the work item. |
### `Workspace`
Represents a remote development workspace.
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="workspaceactualstate"></a>`actualState` | [`String!`](#string) | Actual state of the workspace. |
| <a id="workspaceclusteragent"></a>`clusterAgent` | [`ClusterAgent!`](#clusteragent) | Kubernetes Agent associated with the workspace. |
| <a id="workspacecreatedat"></a>`createdAt` | [`Time!`](#time) | Timestamp of workspace creation. |
| <a id="workspacedeploymentresourceversion"></a>`deploymentResourceVersion` | [`Int`](#int) | ResourceVersion of the Deployment resource for the workspace. |
| <a id="workspacedesiredstate"></a>`desiredState` | [`String!`](#string) | Desired state of the workspace. |
| <a id="workspacedesiredstateupdatedat"></a>`desiredStateUpdatedAt` | [`Time!`](#time) | Timestamp of last update to desired state. |
| <a id="workspacedevfile"></a>`devfile` | [`String!`](#string) | Source YAML of the devfile used to configure the workspace. |
| <a id="workspacedevfilepath"></a>`devfilePath` | [`String!`](#string) | Project repo git path containing the devfile used to configure the workspace. |
| <a id="workspacedevfileref"></a>`devfileRef` | [`String!`](#string) | Project repo git ref containing the devfile used to configure the workspace. |
| <a id="workspaceeditor"></a>`editor` | [`String!`](#string) | Editor used to configure the workspace. Must match a configured template. |
| <a id="workspaceid"></a>`id` | [`RemoteDevelopmentWorkspaceID!`](#remotedevelopmentworkspaceid) | Global ID of the workspace. |
| <a id="workspacemaxhoursbeforetermination"></a>`maxHoursBeforeTermination` | [`Int!`](#int) | Maximum hours the workspace can exist before it is automatically terminated. |
| <a id="workspacename"></a>`name` | [`String!`](#string) | Name of the workspace in Kubernetes. |
| <a id="workspacenamespace"></a>`namespace` | [`String!`](#string) | Namespace of the workspace in Kubernetes. |
| <a id="workspaceprocesseddevfile"></a>`processedDevfile` | [`String!`](#string) | Processed YAML of the devfile used to configure the workspace. |
| <a id="workspaceprojectid"></a>`projectId` | [`ID!`](#id) | ID of the Project providing the Devfile for the workspace. |
| <a id="workspacerespondedtoagentat"></a>`respondedToAgentAt` | [`Time`](#time) | Timestamp of last response sent to GA4K for the workspace. |
| <a id="workspaceupdatedat"></a>`updatedAt` | [`Time!`](#time) | Timestamp of last update to any mutable workspace property. |
| <a id="workspaceurl"></a>`url` | [`String!`](#string) | URL of the workspace. |
| <a id="workspaceuser"></a>`user` | [`UserCore!`](#usercore) | Owner of the workspace. |
### `X509Certificate`
Represents an X.509 certificate.
@ -25092,6 +25315,15 @@ Verification status of a GPG or X.509 signature for a commit.
| <a id="visibilitylevelsenumprivate"></a>`private` | Private visibility level. |
| <a id="visibilitylevelsenumpublic"></a>`public` | Public visibility level. |
### `VisibilityPipelineIdType`
Determines whether the pipeline list shows ID or IID.
| Value | Description |
| ----- | ----------- |
| <a id="visibilitypipelineidtypeid"></a>`ID` | Display pipeline ID. |
| <a id="visibilitypipelineidtypeiid"></a>`IID` | Display pipeline IID. |
### `VisibilityScopesEnum`
| Value | Description |
@ -25878,6 +26110,12 @@ A `ReleasesLinkID` is a global ID. It is encoded as a string.
An example `ReleasesLinkID` is: `"gid://gitlab/Releases::Link/1"`.
### `RemoteDevelopmentWorkspaceID`
A `RemoteDevelopmentWorkspaceID` is a global ID. It is encoded as a string.
An example `RemoteDevelopmentWorkspaceID` is: `"gid://gitlab/RemoteDevelopment::Workspace/1"`.
### `SecurityTrainingProviderID`
A `SecurityTrainingProviderID` is a global ID. It is encoded as a string.
@ -26673,6 +26911,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| <a id="usertodosstate"></a>`state` | [`[TodoStateEnum!]`](#todostateenum) | State of the todo. |
| <a id="usertodostype"></a>`type` | [`[TodoTargetEnum!]`](#todotargetenum) | Type of the todo. |
###### `User.workspaces`
Workspaces owned by the current user.
Returns [`WorkspaceConnection`](#workspaceconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
####### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="userworkspacesids"></a>`ids` | [`[RemoteDevelopmentWorkspaceID!]`](#remotedevelopmentworkspaceid) | Array of global workspace IDs. For example, `["gid://gitlab/RemoteDevelopment::Workspace/1"]`. |
#### `WorkItemWidget`
Implementations:

View File

@ -2943,13 +2943,14 @@ Example response:
## Configure pull mirroring for a project **(PREMIUM)**
> - Moved to GitLab Premium in GitLab 13.9.
> - Field `mirror_branch_regex` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102608) in GitLab 15.8 [with a flag](../administration/feature_flags.md) named `mirror_only_branches_match_regex`. Disabled by default.
> - Field `mirror_branch_regex` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 15.8 [with a flag](../administration/feature_flags.md) named `mirror_only_branches_match_regex`. Disabled by default.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 16.0.
FLAG:
On self-managed GitLab, by default the field `mirror_branch_regex` is not available.
To make it available, ask an administrator to [enable the feature flag](../administration/feature_flags.md)
On self-managed GitLab, by default the field `mirror_branch_regex` is available.
To hide the feature, ask an administrator to [disable the feature flag](../administration/feature_flags.md)
named `mirror_only_branches_match_regex`.
On GitLab.com, this feature is not available.
On GitLab.com, this feature is available.
Configure pull mirroring while [creating a new project](#create-project)
or [updating an existing project](#edit-project) using the API

View File

@ -91,13 +91,14 @@ Learn how to [configure a pull mirror](projects.md#configure-pull-mirroring-for-
## Create a push mirror
> Field `mirror_branch_regex` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102608) in GitLab 15.8 [with a flag](../administration/feature_flags.md) named `mirror_only_branches_match_regex`. Disabled by default.
> - Field `mirror_branch_regex` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 15.8 [with a flag](../administration/feature_flags.md) named `mirror_only_branches_match_regex`. Disabled by default.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 16.0.
FLAG:
On self-managed GitLab, by default the field `mirror_branch_regex` is not available.
To make it available, ask an administrator to [enable the feature flag](../administration/feature_flags.md)
On self-managed GitLab, by default the field `mirror_branch_regex` is available.
To hide the feature, ask an administrator to [disable the feature flag](../administration/feature_flags.md)
named `mirror_only_branches_match_regex`.
On GitLab.com, this feature is not available.
On GitLab.com, this feature is available.
Push mirroring is disabled by default. To enable it, include the optional parameter
`enabled` when you create the mirror:

View File

@ -96,7 +96,7 @@ This worker imports assigned reviewers of pull requests. For each pull request,
This worker imports reviews of pull requests. For each pull request, this worker:
- Fetches all the pages of reviews.
- Schedules a `Gitlab::GithubImport::ImportPullRequestReviewWorker` job for each fetched review.
- Schedules a `Gitlab::GithubImport::PullRequests::ImportReviewWorker` job for each fetched review.
### 9. Stage::ImportIssuesAndDiffNotesWorker

View File

@ -530,6 +530,63 @@ end
When running mobile tests for phone layouts, both `remote_mobile_device_name` and `mobile_layout` are `true`, but when using a tablet layout, only `remote_mobile_device_name` is `true`. This is because phone layouts have more menus closed by default: both tablets and phones have the left nav closed, but unlike phone layouts, tablets have the regular top navigation bar rather than the mobile one. So if the navigation being edited also needs to be used in tablet layouts, prepend with `remote_mobile_device_name` instead of `mobile_layout?` so the change applies to tablet layouts as well.
## Targeting canary vs non-canary components in live environments
Use the `QA_COOKIES` ENV variable to have the entire test target a `canary` (`staging-canary` or `canary`) or `non-canary` (`staging` or `production`) environment.
Locally, that would mean prepending the ENV variable to your call to bin/qa. To target the `canary` version of that environment:
```shell
QA_COOKIES="gitlab_canary=true" WEBDRIVER_HEADLESS=false bin/qa Test::Instance::Staging <YOUR SPECIFIC TAGS OR TESTS>
```
Alternatively, you may set the cookie to `false` to ensure the `non-canary` version is targeted.
You can also export the cookie for your current session to avoid prepending it each time:
```shell
export QA_COOKIES="gitlab_canary=true"
```
### Updating the cookie within a running spec
Within a specific test, you can target either the `canary` or `non-canary` nodes within live environments, such as `staging` and `production`.
For example, to switch back and forth between the two environments, you could utilize the `target_canary` method:
```ruby
it 'tests toggling between canary and non-canary nodes' do
Runtime::Browser.visit(:gitlab, Page::Main::Login)
# After starting the browser session, use the target_canary method ...
Runtime::Browser::Session.target_canary(true)
Flow::Login.sign_in
verify_session_on_canary(true)
Runtime::Browser::Session.target_canary(false)
# Refresh the page ...
verify_session_on_canary(false)
# Log out and clean up ...
end
def verify_session_on_canary(enable_canary)
Page::Main::Menu.perform do |menu|
aggregate_failures 'testing session log in' do
expect(menu.canary?).to be(enable_canary)
end
end
end
```
You can verify whether GitLab is appropriately redirecting your session to the `canary` or `non-canary` nodes with the `menu.canary?` method.
The above spec is verbose, written specifically this way to ensure the idea behind the implementation is clear. We recommend following the practices detailed within our [Beginner's guide to writing end-to-end tests](beginners_guide.md).
## OpenID Connect (OIDC) tests
To run the [`login_via_oidc_with_gitlab_as_idp_spec`](https://gitlab.com/gitlab-org/gitlab/-/blob/188e2c876a17a097448d7f3ed35bdf264fed0d3b/qa/qa/specs/features/browser_ui/1_manage/login/login_via_oidc_with_gitlab_as_idp_spec.rb) on your local machine:

View File

@ -88,3 +88,5 @@ For example, the parameter `query=gitlab-org/gitlab-foss,gitlab-org/gitlab,gitla
| Cycle time | Median time from the earliest commit of a linked issue's merge request to when that issue is closed. | [VSA overview](https://gitlab.com/groups/gitlab-org/-/analytics/value_stream_analytics) | [View the lead time and cycle time for issues](../group/value_stream_analytics/index.md#key-metrics) |
| New issues | Number of new issues created. | [Issue Analytics](https://gitlab.com/groups/gitlab-org/-/issues_analytics) | Issue analytics [for projects](issue_analytics.md) and [for groups](../../user/group/issues_analytics/index.md) |
| Number of deploys | Total number of deploys to production. | [Merge Request Analytics](https://gitlab.com/gitlab-org/gitlab/-/analytics/merge_request_analytics) | [Merge request analytics](merge_request_analytics.md) |
| Critical vulnerabilities | Total critical vulnerabilities in the project or group. | [Vulnerability report](https://gitlab.com/gitlab-org/gitlab/-/security/vulnerability_report) | [Vulnerability report](../application_security/vulnerability_report/index.md) |
| High vulnerabilities | Total high vulnerabilities in the project or group. | [Vulnerability report](https://gitlab.com/gitlab-org/gitlab/-/security/vulnerability_report) | [Vulnerability report](../application_security/vulnerability_report/index.md) |

View File

@ -106,13 +106,14 @@ To use this option, select **Only mirror protected branches** when you create a
## Mirror specific branches
> Mirroring branches matching a regex [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102608) in GitLab 15.8 [with a flag](../../../../administration/feature_flags.md) named `mirror_only_branches_match_regex`. Disabled by default.
> - Mirroring branches matching a regex [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102608) in GitLab 15.8 [with a flag](../../../../administration/feature_flags.md) named `mirror_only_branches_match_regex`. Disabled by default.
> - [Enabled by default](https://gitlab.com/gitlab-org/gitlab/-/issues/381667) in GitLab 16.0.
FLAG:
On self-managed GitLab, by default the field `mirror_branch_regex` is not available.
To make it available, ask an administrator to [enable the feature flag](../../../../administration/feature_flags.md)
On self-managed GitLab, by default the field `mirror_branch_regex` is available.
To hide the feature, ask an administrator to [disable the feature flag](../../../../administration/feature_flags.md)
named `mirror_only_branches_match_regex`.
On GitLab.com, this feature is not available.
On GitLab.com, this feature is available.
To mirror only branches with names matching an [re2 regular expression](https://github.com/google/re2/wiki/Syntax),
enter a regular expression into the **Mirror specific branches** field. Branches with names that

View File

@ -73,6 +73,11 @@ module API
Gitlab::UsageDataCounters::KubernetesAgentCounter.increment_event_counts(events)
end
def update_configuration(agent:, config:)
::Clusters::Agents::Authorizations::CiAccess::RefreshService.new(agent, config: config).execute
::Clusters::Agents::Authorizations::UserAccess::RefreshService.new(agent, config: config).execute
end
end
namespace 'internal' do
@ -128,9 +133,7 @@ module API
end
post '/', feature_category: :deployment_management, urgency: :low do
agent = ::Clusters::Agent.find(params[:agent_id])
::Clusters::Agents::Authorizations::CiAccess::RefreshService.new(agent, config: params[:agent_config]).execute
::Clusters::Agents::Authorizations::UserAccess::RefreshService.new(agent, config: params[:agent_config]).execute
update_configuration(agent: agent, config: params[:agent_config])
no_content!
end

View File

@ -26,7 +26,7 @@ variables:
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products"
#
FUZZAPI_VERSION: "2"
FUZZAPI_VERSION: "3"
FUZZAPI_IMAGE_SUFFIX: ""
FUZZAPI_IMAGE: api-security

View File

@ -26,7 +26,7 @@ variables:
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products"
#
FUZZAPI_VERSION: "2"
FUZZAPI_VERSION: "3"
FUZZAPI_IMAGE_SUFFIX: ""
FUZZAPI_IMAGE: api-security

View File

@ -26,7 +26,7 @@ variables:
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products"
#
DAST_API_VERSION: "2"
DAST_API_VERSION: "3"
DAST_API_IMAGE_SUFFIX: ""
DAST_API_IMAGE: api-security

View File

@ -26,7 +26,7 @@ variables:
# (SAST, Dependency Scanning, ...)
SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products"
#
DAST_API_VERSION: "2"
DAST_API_VERSION: "3"
DAST_API_IMAGE_SUFFIX: ""
DAST_API_IMAGE: api-security

View File

@ -14,7 +14,7 @@ stages:
variables:
SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products"
DAST_API_VERSION: "2"
DAST_API_VERSION: "3"
DAST_API_IMAGE_SUFFIX: ""
DAST_API_IMAGE: api-security

View File

@ -255,7 +255,7 @@ dast-runner-validation:
api-security:
extends: .download_images
variables:
SECURE_BINARIES_ANALYZER_VERSION: "2"
SECURE_BINARIES_ANALYZER_VERSION: "3"
only:
variables:
- $SECURE_BINARIES_DOWNLOAD_IMAGES == "true" &&

View File

@ -1,141 +0,0 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    module Importer
      # Imports a single GitHub pull request review into a GitLab merge
      # request as a review note, an approval record, and a reviewer record.
      class PullRequestReviewImporter
        # review - An instance of `Gitlab::GithubImport::Representation::PullRequestReview`
        # project - An instance of `Project`
        # client - An instance of `Gitlab::GithubImport::Client`
        def initialize(review, project, client)
          @review = review
          @project = project
          @client = client
          @merge_request = project.merge_requests.find_by_id(review.merge_request_id)
        end

        # Imports the review. When the GitHub author can be mapped to a GitLab
        # user, the note/approval/reviewer records are attributed to that user;
        # otherwise a complementary note is created on behalf of the project
        # creator, embedding the original GitHub author.
        def execute
          user_finder = GithubImport::UserFinder.new(project, client)
          gitlab_user_id = user_finder.user_id_for(review.author)

          if gitlab_user_id
            add_review_note!(gitlab_user_id)
            add_approval!(gitlab_user_id)
            add_reviewer!(gitlab_user_id)
          else
            add_complementary_review_note!(project.creator_id)
          end
        end

        private

        attr_reader :review, :merge_request, :project, :client

        # Adds the review body as a note; skipped when the body is empty.
        def add_review_note!(author_id)
          return if review.note.empty?

          add_note!(author_id, review_note_content)
        end

        # Adds a note attributed to `author_id` (the project creator) when no
        # GitLab user mapping exists. Skipped when there is nothing worth
        # recording (no body and not an approval).
        def add_complementary_review_note!(author_id)
          return if review.note.empty? && !review.approval?

          note_body = MarkdownText.format(
            review_note_content,
            review.author
          )

          add_note!(author_id, note_body)
        end

        # Builds the note text: a "**Review:** <type>" header, optionally
        # followed by the review body.
        def review_note_content
          header = "**Review:** #{review.review_type.humanize}"

          if review.note.present?
            "#{header}\n\n#{review.note}"
          else
            header
          end
        end

        def add_note!(author_id, note)
          note = Note.new(note_attributes(author_id, note))

          note.save!
        end

        # Common attributes for notes created by this importer. `extra` lets
        # callers add or override attributes (e.g. system note metadata).
        def note_attributes(author_id, note, extra = {})
          {
            importing: true,
            noteable_id: merge_request.id,
            noteable_type: 'MergeRequest',
            project_id: project.id,
            author_id: author_id,
            note: note,
            system: false,
            created_at: submitted_at,
            updated_at: submitted_at
          }.merge(extra)
        end

        # Records an approval for APPROVED reviews. The insert is idempotent
        # (unique on user/merge request); the system note is only added when a
        # new row was actually inserted.
        def add_approval!(user_id)
          return unless review.review_type == 'APPROVED'

          approval_attribues = {
            merge_request_id: merge_request.id,
            user_id: user_id,
            created_at: submitted_at,
            updated_at: submitted_at
          }

          result = ::Approval.insert(
            approval_attribues,
            returning: [:id],
            unique_by: [:user_id, :merge_request_id]
          )

          if result.rows.present?
            add_approval_system_note!(user_id)
          end
        end

        # Marks the user as a reviewer in the "reviewed" state, unless a
        # reviewer record already exists from the earlier import stage.
        def add_reviewer!(user_id)
          return if review_re_requested?(user_id)

          ::MergeRequestReviewer.create!(
            merge_request_id: merge_request.id,
            user_id: user_id,
            state: ::MergeRequestReviewer.states['reviewed'],
            created_at: submitted_at
          )
        rescue ActiveRecord::RecordNotUnique
          # Multiple reviews from a single person could cause a SQL concurrency issue here
          nil
        end

        # rubocop:disable CodeReuse/ActiveRecord
        def review_re_requested?(user_id)
          # records that were imported on previous stage with "unreviewed" status
          MergeRequestReviewer.where(merge_request_id: merge_request.id, user_id: user_id).exists?
        end
        # rubocop:enable CodeReuse/ActiveRecord

        def add_approval_system_note!(user_id)
          attributes = note_attributes(
            user_id,
            'approved this merge request',
            system: true,
            system_note_metadata: SystemNoteMetadata.new(action: 'approved')
          )

          Note.create!(attributes)
        end

        # Falls back to the merge request's update time when GitHub did not
        # provide a submission timestamp for the review.
        def submitted_at
          @submitted_at ||= (review.submitted_at || merge_request.updated_at)
        end
      end
    end
  end
end

View File

@ -0,0 +1,141 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    module Importer
      module PullRequests
        # Imports a single GitHub pull request review into a GitLab merge
        # request as a review note, an approval record, and a reviewer record.
        class ReviewImporter
          # review - An instance of `Gitlab::GithubImport::Representation::PullRequestReview`
          # project - An instance of `Project`
          # client - An instance of `Gitlab::GithubImport::Client`
          def initialize(review, project, client)
            @review = review
            @project = project
            @client = client
            @merge_request = project.merge_requests.find_by_id(review.merge_request_id)
          end

          # Imports the review. When the GitHub author can be mapped to a
          # GitLab user, the note/approval/reviewer records are attributed to
          # that user; otherwise a complementary note is created on behalf of
          # the project creator, embedding the original GitHub author.
          def execute
            user_finder = GithubImport::UserFinder.new(project, client)
            gitlab_user_id = user_finder.user_id_for(review.author)

            if gitlab_user_id
              add_review_note!(gitlab_user_id)
              add_approval!(gitlab_user_id)
              add_reviewer!(gitlab_user_id)
            else
              add_complementary_review_note!(project.creator_id)
            end
          end

          private

          attr_reader :review, :merge_request, :project, :client

          # Adds the review body as a note; skipped when the body is empty.
          def add_review_note!(author_id)
            return if review.note.empty?

            add_note!(author_id, review_note_content)
          end

          # Adds a note attributed to `author_id` (the project creator) when no
          # GitLab user mapping exists. Skipped when there is nothing worth
          # recording (no body and not an approval).
          def add_complementary_review_note!(author_id)
            return if review.note.empty? && !review.approval?

            note_body = MarkdownText.format(
              review_note_content,
              review.author
            )

            add_note!(author_id, note_body)
          end

          # Builds the note text: a "**Review:** <type>" header, optionally
          # followed by the review body.
          def review_note_content
            header = "**Review:** #{review.review_type.humanize}"

            if review.note.present?
              "#{header}\n\n#{review.note}"
            else
              header
            end
          end

          def add_note!(author_id, note)
            note = Note.new(note_attributes(author_id, note))

            note.save!
          end

          # Common attributes for notes created by this importer. `extra` lets
          # callers add or override attributes (e.g. system note metadata).
          def note_attributes(author_id, note, extra = {})
            {
              importing: true,
              noteable_id: merge_request.id,
              noteable_type: 'MergeRequest',
              project_id: project.id,
              author_id: author_id,
              note: note,
              system: false,
              created_at: submitted_at,
              updated_at: submitted_at
            }.merge(extra)
          end

          # Records an approval for APPROVED reviews. The insert is idempotent
          # (unique on user/merge request); the system note is only added when
          # a new row was actually inserted.
          def add_approval!(user_id)
            return unless review.review_type == 'APPROVED'

            # Fixed local variable misspelling: approval_attribues -> approval_attributes.
            approval_attributes = {
              merge_request_id: merge_request.id,
              user_id: user_id,
              created_at: submitted_at,
              updated_at: submitted_at
            }

            result = ::Approval.insert(
              approval_attributes,
              returning: [:id],
              unique_by: [:user_id, :merge_request_id]
            )

            add_approval_system_note!(user_id) if result.rows.present?
          end

          # Marks the user as a reviewer in the "reviewed" state, unless a
          # reviewer record already exists from the earlier import stage.
          def add_reviewer!(user_id)
            return if review_re_requested?(user_id)

            ::MergeRequestReviewer.create!(
              merge_request_id: merge_request.id,
              user_id: user_id,
              state: ::MergeRequestReviewer.states['reviewed'],
              created_at: submitted_at
            )
          rescue ActiveRecord::RecordNotUnique
            # Multiple reviews from a single person could cause a SQL concurrency issue here
            nil
          end

          # rubocop:disable CodeReuse/ActiveRecord
          def review_re_requested?(user_id)
            # records that were imported on previous stage with "unreviewed" status
            MergeRequestReviewer.where(merge_request_id: merge_request.id, user_id: user_id).exists?
          end
          # rubocop:enable CodeReuse/ActiveRecord

          def add_approval_system_note!(user_id)
            attributes = note_attributes(
              user_id,
              'approved this merge request',
              system: true,
              system_note_metadata: SystemNoteMetadata.new(action: 'approved')
            )

            Note.create!(attributes)
          end

          # Falls back to the merge request's update time when GitHub did not
          # provide a submission timestamp for the review.
          def submitted_at
            @submitted_at ||= (review.submitted_at || merge_request.updated_at)
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,114 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    module Importer
      module PullRequests
        # Schedules the import of all pull request reviews for a project, one
        # Sidekiq job per review, with Redis-backed caching so interrupted
        # imports do not refetch already-imported data.
        class ReviewsImporter
          include ParallelScheduling

          def initialize(...)
            super

            # Redis set key holding IDs of merge requests whose reviews have
            # all been imported already.
            @merge_requests_already_imported_cache_key =
              "github-importer/merge_request/already-imported/#{project.id}"
          end

          # Importer class run by each scheduled worker for a single review.
          def importer_class
            ReviewImporter
          end

          def representation_class
            Gitlab::GithubImport::Representation::PullRequestReview
          end

          def sidekiq_worker_class
            Gitlab::GithubImport::PullRequests::ImportReviewWorker
          end

          # Octokit client method used to fetch the review collection.
          def collection_method
            :pull_request_reviews
          end

          def object_type
            :pull_request_review
          end

          def id_for_already_imported_cache(review)
            review[:id]
          end

          # The worker can be interrupted, by rate limit for instance,
          # in different situations. To avoid requesting already imported data,
          # if the worker is interrupted:
          #   - before importing all reviews of a merge request
          #     The reviews page is cached with the `PageCounter`, by merge request.
          #   - before importing all merge requests reviews
          #     Merge requests that had all the reviews imported are cached with
          #     `mark_merge_request_reviews_imported`
          def each_object_to_import(&_block)
            each_review_page do |page, merge_request|
              page.objects.each do |review|
                review = review.to_h

                next if already_imported?(review)

                Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched)

                # Attach the GitLab merge request identifiers so the per-review
                # worker can locate its target without another lookup.
                review[:merge_request_id] = merge_request.id
                review[:merge_request_iid] = merge_request.iid

                yield(review)

                mark_as_imported(review)
              end
            end
          end

          private

          attr_reader :merge_requests_already_imported_cache_key

          # Yields each page of reviews together with its merge request,
          # resuming from the cached page number per merge request.
          def each_review_page
            merge_requests_to_import.find_each do |merge_request|
              # The page counter needs to be scoped by merge request to avoid skipping
              # pages of reviews from already imported merge requests.
              page_counter = PageCounter.new(project, page_counter_id(merge_request))
              repo = project.import_source
              options = collection_options.merge(page: page_counter.current)

              client.each_page(collection_method, repo, merge_request.iid, options) do |page|
                next unless page_counter.set(page.number)

                yield(page, merge_request)
              end

              # Avoid unnecessary Redis cache keys after the work is done.
              page_counter.expire!
              mark_merge_request_reviews_imported(merge_request)
            end
          end

          # Returns only the merge requests that still have reviews to be imported.
          def merge_requests_to_import
            project.merge_requests.id_not_in(already_imported_merge_requests)
          end

          def already_imported_merge_requests
            Gitlab::Cache::Import::Caching.values_from_set(merge_requests_already_imported_cache_key)
          end

          def page_counter_id(merge_request)
            "merge_request/#{merge_request.id}/#{collection_method}"
          end

          # Records that every review of the merge request has been scheduled,
          # so a restarted import skips it entirely.
          def mark_merge_request_reviews_imported(merge_request)
            Gitlab::Cache::Import::Caching.set_add(
              merge_requests_already_imported_cache_key,
              merge_request.id
            )
          end
        end
      end
    end
  end
end

View File

@ -1,112 +0,0 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    module Importer
      # Schedules the import of all pull request reviews for a project, one
      # Sidekiq job per review, with Redis-backed caching so interrupted
      # imports do not refetch already-imported data.
      class PullRequestsReviewsImporter
        include ParallelScheduling

        def initialize(...)
          super

          # Redis set key holding IDs of merge requests whose reviews have
          # all been imported already.
          @merge_requests_already_imported_cache_key =
            "github-importer/merge_request/already-imported/#{project.id}"
        end

        # Importer class run by each scheduled worker for a single review.
        def importer_class
          PullRequestReviewImporter
        end

        def representation_class
          Gitlab::GithubImport::Representation::PullRequestReview
        end

        def sidekiq_worker_class
          ImportPullRequestReviewWorker
        end

        # Octokit client method used to fetch the review collection.
        def collection_method
          :pull_request_reviews
        end

        def object_type
          :pull_request_review
        end

        def id_for_already_imported_cache(review)
          review[:id]
        end

        # The worker can be interrupted, by rate limit for instance,
        # in different situations. To avoid requesting already imported data,
        # if the worker is interrupted:
        #   - before importing all reviews of a merge request
        #     The reviews page is cached with the `PageCounter`, by merge request.
        #   - before importing all merge requests reviews
        #     Merge requests that had all the reviews imported are cached with
        #     `mark_merge_request_reviews_imported`
        #
        # NOTE: the declared `&block` is unused; iteration happens via `yield`.
        def each_object_to_import(&block)
          each_review_page do |page, merge_request|
            page.objects.each do |review|
              review = review.to_h

              next if already_imported?(review)

              Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched)

              # Attach the GitLab merge request identifiers so the per-review
              # worker can locate its target without another lookup.
              review[:merge_request_id] = merge_request.id
              review[:merge_request_iid] = merge_request.iid

              yield(review)

              mark_as_imported(review)
            end
          end
        end

        private

        attr_reader :merge_requests_already_imported_cache_key

        # Yields each page of reviews together with its merge request,
        # resuming from the cached page number per merge request.
        def each_review_page
          merge_requests_to_import.find_each do |merge_request|
            # The page counter needs to be scoped by merge request to avoid skipping
            # pages of reviews from already imported merge requests.
            page_counter = PageCounter.new(project, page_counter_id(merge_request))
            repo = project.import_source
            options = collection_options.merge(page: page_counter.current)

            client.each_page(collection_method, repo, merge_request.iid, options) do |page|
              next unless page_counter.set(page.number)

              yield(page, merge_request)
            end

            # Avoid unnecessary Redis cache keys after the work is done.
            page_counter.expire!
            mark_merge_request_reviews_imported(merge_request)
          end
        end

        # Returns only the merge requests that still have reviews to be imported.
        def merge_requests_to_import
          project.merge_requests.id_not_in(already_imported_merge_requests)
        end

        def already_imported_merge_requests
          Gitlab::Cache::Import::Caching.values_from_set(merge_requests_already_imported_cache_key)
        end

        def page_counter_id(merge_request)
          "merge_request/#{merge_request.id}/#{collection_method}"
        end

        # Records that every review of the merge request has been scheduled,
        # so a restarted import skips it entirely.
        def mark_merge_request_reviews_imported(merge_request)
          Gitlab::Cache::Import::Caching.set_add(
            merge_requests_already_imported_cache_key,
            merge_request.id
          )
        end
      end
    end
  end
end

View File

@ -5606,10 +5606,13 @@ msgstr ""
msgid "ApprovalRule|Name"
msgstr ""
msgid "ApprovalRule|Newly detected"
msgid "ApprovalRule|Needs Triage"
msgstr ""
msgid "ApprovalRule|Previously detected"
msgid "ApprovalRule|New"
msgstr ""
msgid "ApprovalRule|Previously existing"
msgstr ""
msgid "ApprovalRule|Reduce your time to merge."
@ -13446,6 +13449,9 @@ msgstr ""
msgid "DORA4Metrics|Change failure rate (percentage)"
msgstr ""
msgid "DORA4Metrics|Critical Vulnerabilities"
msgstr ""
msgid "DORA4Metrics|Cycle time"
msgstr ""
@ -13476,6 +13482,9 @@ msgstr ""
msgid "DORA4Metrics|Go to docs"
msgstr ""
msgid "DORA4Metrics|High Vulnerabilities"
msgstr ""
msgid "DORA4Metrics|Lead Time for Changes"
msgstr ""
@ -17019,15 +17028,27 @@ msgstr ""
msgid "Environment|Cluster IP"
msgstr ""
msgid "Environment|CronJobs"
msgstr ""
msgid "Environment|DaemonSets"
msgstr ""
msgid "Environment|Deployment tier"
msgstr ""
msgid "Environment|Deployments"
msgstr ""
msgid "Environment|External IP"
msgstr ""
msgid "Environment|Failed"
msgstr ""
msgid "Environment|Jobs"
msgstr ""
msgid "Environment|Kubernetes overview"
msgstr ""
@ -17040,15 +17061,24 @@ msgstr ""
msgid "Environment|Ports"
msgstr ""
msgid "Environment|ReplicaSets"
msgstr ""
msgid "Environment|Running"
msgstr ""
msgid "Environment|Services"
msgstr ""
msgid "Environment|StatefulSets"
msgstr ""
msgid "Environment|Succeeded"
msgstr ""
msgid "Environment|Summary"
msgstr ""
msgid "Epic"
msgstr ""
@ -23641,6 +23671,9 @@ msgstr ""
msgid "Inherited:"
msgstr ""
msgid "Inheriting from parent is not yet supported"
msgstr ""
msgid "Initial default branch name"
msgstr ""
@ -29672,6 +29705,12 @@ msgstr ""
msgid "No committers"
msgstr ""
msgid "No component has 'gl/inject-editor' attribute"
msgstr ""
msgid "No components present in the devfile"
msgstr ""
msgid "No confirmation email received? Check your spam folder or %{request_link_start}request new confirmation email%{request_link_end}."
msgstr ""
@ -39508,7 +39547,7 @@ msgstr ""
msgid "ScanResultPolicy|Maximum number of severity-criteria is one"
msgstr ""
msgid "ScanResultPolicy|Maximum number of status-criteria is one"
msgid "ScanResultPolicy|Maximum number of status-criteria is two"
msgstr ""
msgid "ScanResultPolicy|New severity"
@ -48850,7 +48889,7 @@ msgstr ""
msgid "Value stream analytics"
msgstr ""
msgid "ValueStreamAnalyticsStage|We don't have enough data to show this stage."
msgid "ValueStreamAnalyticsStage|There are 0 items to show in this stage, for these filters, within this time range."
msgstr ""
msgid "ValueStreamAnalytics|%{stageCount}+ items"
@ -48940,6 +48979,12 @@ msgstr ""
msgid "ValueStreamAnalytics|There was an error while fetching value stream analytics %{requestTypeName} data."
msgstr ""
msgid "ValueStreamAnalytics|Total Critical vulnerabilities."
msgstr ""
msgid "ValueStreamAnalytics|Total High vulnerabilities."
msgstr ""
msgid "ValueStreamAnalytics|Total number of deploys to production."
msgstr ""
@ -49819,9 +49864,6 @@ msgstr ""
msgid "We detected potential spam in the %{humanized_resource_name}. Please solve the reCAPTCHA to proceed."
msgstr ""
msgid "We don't have enough data to show this stage."
msgstr ""
msgid "We found your token in a public project and have automatically revoked it to protect your account."
msgstr ""
@ -53017,6 +53059,12 @@ msgstr ""
msgid "for %{ref}"
msgstr ""
msgid "for Workspace is required to be public"
msgstr ""
msgid "for Workspace must have an associated RemoteDevelopmentAgentConfig"
msgstr ""
msgid "for this project"
msgstr ""
@ -53164,6 +53212,9 @@ msgstr ""
msgid "is blocked by"
msgstr ""
msgid "is currently immutable, and cannot be updated. Create a new agent instead."
msgstr ""
msgid "is forbidden by a top-level group"
msgstr ""

View File

@ -56,7 +56,7 @@
"@gitlab/cluster-client": "^1.2.0",
"@gitlab/favicon-overlay": "2.0.0",
"@gitlab/fonts": "^1.2.0",
"@gitlab/svgs": "3.44.0",
"@gitlab/svgs": "3.45.0",
"@gitlab/ui": "62.9.0",
"@gitlab/visual-review-tools": "1.7.3",
"@gitlab/web-ide": "0.0.1-dev-20230425040132",

View File

@ -8,6 +8,17 @@ module QA
# Opens the "create project from template" flow from the New Project page.
def go_to_create_project_from_template
  Page::Project::New.perform(&:click_create_from_template_link)
end
# Archives the given project through the UI: visit it, open general
# settings, expand the advanced section, trigger archiving, then wait
# until the "Archived project!" banner confirms the change.
# The navigation steps are order-dependent.
def archive_project(project)
  project.visit!
  Page::Project::Menu.perform(&:go_to_general_settings)
  Page::Project::Settings::Main.perform(&:expand_advanced_settings)
  Page::Project::Settings::Advanced.perform(&:archive_project)

  # Archiving is reflected asynchronously in the UI, so poll for the banner.
  Support::Waiter.wait_until do
    Page::Project::Show.perform { |show| show.has_text?("Archived project!") }
  end
end
end
end
end

View File

@ -107,6 +107,12 @@ module QA
end
end
# Navigates to the "Workspaces" item in the left sidebar menu.
def go_to_workspaces
  within_sidebar do
    click_element(:sidebar_menu_link, menu_item: "Workspaces")
  end
end
private
def hover_settings

View File

@ -10,6 +10,10 @@ module QA
element :select_namespace_dropdown_search_field
end
view 'app/views/projects/_new_project_fields.html.haml' do
element :project_create_button
end
def import!(gitlab_repo_path, name)
fill_git_repository_url_link(gitlab_repo_path)
fill_project_name(name)
@ -42,7 +46,7 @@ module QA
end
def click_create_button
find('.btn-confirm').click
click_element(:project_create_button)
end
def wait_for_success

View File

@ -60,7 +60,7 @@ module QA
def initialize
wait_for_requests(skip_finished_loading_check: true)
dismiss_file_tree_popover if has_element?(:file_tree_popover)
dismiss_file_tree_popover
super
end

View File

@ -52,7 +52,7 @@ RSpec.describe 'Value Stream Analytics', :js, feature_category: :value_stream_ma
it 'shows active stage with empty message' do
expect(page).to have_selector('.gl-path-active-item-indigo', text: 'Issue')
expect(page).to have_content("We don't have enough data to show this stage.")
expect(page).to have_content("There are 0 items to show in this stage, for these filters, within this time range.")
end
end

View File

@ -218,7 +218,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js, feat
end
end
it 'allows user to quickly scroll to next unresolved thread' do
it 'allows user to quickly scroll to next unresolved thread', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410109' do
page.within '.discussions-counter' do
page.find('.discussion-next-btn').click
end

View File

@ -12,7 +12,8 @@ let trackingSpy = null;
const noDataSvgPath = 'path/to/no/data';
const emptyStateTitle = 'Too much data';
const notEnoughDataError = "We don't have enough data to show this stage.";
const notEnoughDataError =
'There are 0 items to show in this stage, for these filters, within this time range.';
const issueEventItems = issueEvents.events;
const reviewEventItems = reviewEvents.events;
const [firstIssueEvent] = issueEventItems;

View File

@ -14,15 +14,20 @@ const TEST_BUILD_ARTIFACTS_SIZE = 1024;
const TEST_PROJECT_PATH = 'project/path';
const TEST_PROJECT_ID = 'gid://gitlab/Project/22';
const createBuildArtifactsSizeResponse = (buildArtifactsSize) => ({
const createBuildArtifactsSizeResponse = ({
buildArtifactsSize = TEST_BUILD_ARTIFACTS_SIZE,
nullStatistics = false,
}) => ({
data: {
project: {
__typename: 'Project',
id: TEST_PROJECT_ID,
statistics: {
__typename: 'ProjectStatistics',
buildArtifactsSize,
},
statistics: nullStatistics
? null
: {
__typename: 'ProjectStatistics',
buildArtifactsSize,
},
},
},
});
@ -82,28 +87,32 @@ describe('ArtifactsApp component', () => {
});
describe.each`
buildArtifactsSize | expectedText
${TEST_BUILD_ARTIFACTS_SIZE} | ${numberToHumanSize(TEST_BUILD_ARTIFACTS_SIZE)}
${null} | ${SIZE_UNKNOWN}
`('when buildArtifactsSize is $buildArtifactsSize', ({ buildArtifactsSize, expectedText }) => {
beforeEach(async () => {
getBuildArtifactsSizeSpy.mockResolvedValue(
createBuildArtifactsSizeResponse(buildArtifactsSize),
);
buildArtifactsSize | nullStatistics | expectedText
${TEST_BUILD_ARTIFACTS_SIZE} | ${false} | ${numberToHumanSize(TEST_BUILD_ARTIFACTS_SIZE)}
${null} | ${false} | ${SIZE_UNKNOWN}
${null} | ${true} | ${SIZE_UNKNOWN}
`(
'when buildArtifactsSize is $buildArtifactsSize',
({ buildArtifactsSize, nullStatistics, expectedText }) => {
beforeEach(async () => {
getBuildArtifactsSizeSpy.mockResolvedValue(
createBuildArtifactsSizeResponse({ buildArtifactsSize, nullStatistics }),
);
createComponent();
createComponent();
await waitForPromises();
});
await waitForPromises();
});
it('hides loader', () => {
expect(findSkeletonLoader().exists()).toBe(false);
});
it('hides loader', () => {
expect(findSkeletonLoader().exists()).toBe(false);
});
it('shows the size', () => {
expect(findBuildArtifactsSize().text()).toMatchInterpolatedText(
`${TOTAL_ARTIFACTS_SIZE} ${expectedText}`,
);
});
});
it('shows the size', () => {
expect(findBuildArtifactsSize().text()).toMatchInterpolatedText(
`${TOTAL_ARTIFACTS_SIZE} ${expectedText}`,
);
});
},
);
});

View File

@ -865,3 +865,45 @@ export const k8sServicesMock = [
},
},
];
// Deployment fixture with both status conditions reporting "True" (healthy).
const readyDeployment = {
  status: {
    conditions: [
      { type: 'Available', status: 'True' },
      { type: 'Progressing', status: 'True' },
    ],
  },
};

// Deployment fixture with both status conditions reporting "False" (failed).
const failedDeployment = {
  status: {
    conditions: [
      { type: 'Available', status: 'False' },
      { type: 'Progressing', status: 'False' },
    ],
  },
};

// DaemonSet fixture: all desired pods ready, none misscheduled.
const readyDaemonSet = {
  status: { numberReady: 1, desiredNumberScheduled: 1, numberMisscheduled: 0 },
};

// DaemonSet fixture: a misscheduled pod and no ready pods.
const failedDaemonSet = {
  status: { numberMisscheduled: 1, numberReady: 0, desiredNumberScheduled: 1 },
};

// StatefulSet/ReplicaSet fixtures: "ready" has readyReplicas equal to
// spec.replicas; "failed" falls short by one.
const readySet = { spec: { replicas: 2 }, status: { readyReplicas: 2 } };
const failedSet = { spec: { replicas: 2 }, status: { readyReplicas: 1 } };

// Job fixtures: one succeeded completion vs. one failure.
const completedJob = { spec: { completions: 1 }, status: { succeeded: 1, failed: 0 } };
const failedJob = { spec: { completions: 1 }, status: { succeeded: 0, failed: 1 } };

// CronJob fixtures: completed (not suspended, has a last schedule time),
// suspended, and failed (active jobs but no last schedule time).
const completedCronJob = {
  spec: { suspend: 0 },
  status: { active: 0, lastScheduleTime: new Date().toString() },
};
const suspendedCronJob = { spec: { suspend: 1 }, status: { active: 0, lastScheduleTime: '' } };
const failedCronJob = { spec: { suspend: 0 }, status: { active: 2, lastScheduleTime: '' } };

// Mocked per-type workload listings mixing healthy and unhealthy entries,
// consumed by the k8sWorkloads query resolver in tests.
export const k8sWorkloadsMock = {
  DeploymentList: [readyDeployment, failedDeployment],
  DaemonSetList: [readyDaemonSet, failedDaemonSet, failedDaemonSet],
  StatefulSetList: [readySet, readySet, failedSet],
  ReplicaSetList: [readySet, failedSet],
  JobList: [completedJob, completedJob, failedJob],
  CronJobList: [completedCronJob, suspendedCronJob, failedCronJob],
};

View File

@ -1,5 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
import { CoreV1Api } from '@gitlab/cluster-client';
import { CoreV1Api, AppsV1Api, BatchV1Api } from '@gitlab/cluster-client';
import { s__ } from '~/locale';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
@ -36,6 +36,7 @@ describe('~/frontend/environments/graphql/resolvers', () => {
headers: { 'GitLab-Agent-Id': '1' },
},
};
const namespace = 'default';
beforeEach(() => {
mockResolvers = resolvers(ENDPOINT);
@ -154,8 +155,6 @@ describe('~/frontend/environments/graphql/resolvers', () => {
});
});
describe('k8sPods', () => {
const namespace = 'default';
const mockPodsListFn = jest.fn().mockImplementation(() => {
return Promise.resolve({
data: {
@ -234,6 +233,92 @@ describe('~/frontend/environments/graphql/resolvers', () => {
);
});
});
describe('k8sWorkloads', () => {
const emptyImplementation = jest.fn().mockImplementation(() => {
return Promise.resolve({
data: {
items: [],
},
});
});
const [
mockNamespacedDeployment,
mockNamespacedDaemonSet,
mockNamespacedStatefulSet,
mockNamespacedReplicaSet,
mockNamespacedJob,
mockNamespacedCronJob,
mockAllDeployment,
mockAllDaemonSet,
mockAllStatefulSet,
mockAllReplicaSet,
mockAllJob,
mockAllCronJob,
] = Array(12).fill(emptyImplementation);
const namespacedMocks = [
{ method: 'listAppsV1NamespacedDeployment', api: AppsV1Api, spy: mockNamespacedDeployment },
{ method: 'listAppsV1NamespacedDaemonSet', api: AppsV1Api, spy: mockNamespacedDaemonSet },
{ method: 'listAppsV1NamespacedStatefulSet', api: AppsV1Api, spy: mockNamespacedStatefulSet },
{ method: 'listAppsV1NamespacedReplicaSet', api: AppsV1Api, spy: mockNamespacedReplicaSet },
{ method: 'listBatchV1NamespacedJob', api: BatchV1Api, spy: mockNamespacedJob },
{ method: 'listBatchV1NamespacedCronJob', api: BatchV1Api, spy: mockNamespacedCronJob },
];
const allMocks = [
{ method: 'listAppsV1DeploymentForAllNamespaces', api: AppsV1Api, spy: mockAllDeployment },
{ method: 'listAppsV1DaemonSetForAllNamespaces', api: AppsV1Api, spy: mockAllDaemonSet },
{ method: 'listAppsV1StatefulSetForAllNamespaces', api: AppsV1Api, spy: mockAllStatefulSet },
{ method: 'listAppsV1ReplicaSetForAllNamespaces', api: AppsV1Api, spy: mockAllReplicaSet },
{ method: 'listBatchV1JobForAllNamespaces', api: BatchV1Api, spy: mockAllJob },
{ method: 'listBatchV1CronJobForAllNamespaces', api: BatchV1Api, spy: mockAllCronJob },
];
beforeEach(() => {
[...namespacedMocks, ...allMocks].forEach((workloadMock) => {
jest
.spyOn(workloadMock.api.prototype, workloadMock.method)
.mockImplementation(workloadMock.spy);
});
});
it('should request namespaced workload types from the cluster_client library if namespace is specified', async () => {
await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace });
namespacedMocks.forEach((workloadMock) => {
expect(workloadMock.spy).toHaveBeenCalledWith(namespace);
});
});
it('should request all workload types from the cluster_client library if namespace is not specified', async () => {
await mockResolvers.Query.k8sWorkloads(null, { configuration, namespace: '' });
allMocks.forEach((workloadMock) => {
expect(workloadMock.spy).toHaveBeenCalled();
});
});
it('should pass fulfilled calls data if one of the API calls fail', async () => {
jest
.spyOn(AppsV1Api.prototype, 'listAppsV1DeploymentForAllNamespaces')
.mockRejectedValue(new Error('API error'));
await expect(
mockResolvers.Query.k8sWorkloads(null, { configuration }),
).resolves.toBeDefined();
});
it('should throw an error if all the API calls fail', async () => {
[...allMocks].forEach((workloadMock) => {
jest
.spyOn(workloadMock.api.prototype, workloadMock.method)
.mockRejectedValue(new Error('API error'));
});
await expect(mockResolvers.Query.k8sWorkloads(null, { configuration })).rejects.toThrow(
'API error',
);
});
});
describe('stopEnvironmentREST', () => {
it('should post to the stop environment path', async () => {
mock.onPost(ENDPOINT).reply(HTTP_STATUS_OK);

View File

@ -107,6 +107,7 @@ describe('~/environments/components/kubernetes_overview.vue', () => {
it('renders kubernetes tabs', () => {
expect(findKubernetesTabs().props()).toEqual({
namespace: agent.kubernetesNamespace,
configuration,
});
});

View File

@ -0,0 +1,115 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlLoadingIcon, GlTab, GlBadge } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import KubernetesSummary from '~/environments/components/kubernetes_summary.vue';
import { mockKasTunnelUrl } from './mock_data';
import { k8sWorkloadsMock } from './graphql/mock_data';

Vue.use(VueApollo);

describe('~/environments/components/kubernetes_summary.vue', () => {
  let wrapper;

  const namespace = 'my-kubernetes-namespace';
  // Cluster connection details passed to the component as a prop.
  const configuration = {
    basePath: mockKasTunnelUrl,
    baseOptions: {
      headers: { 'GitLab-Agent-Id': '1' },
    },
  };

  const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
  const findTab = () => wrapper.findComponent(GlTab);
  const findSummaryListItem = (at) => wrapper.findAllByTestId('summary-list-item').at(at);

  // Apollo provider whose k8sWorkloads query resolves with the shared mock.
  const createApolloProvider = () => {
    const mockResolvers = {
      Query: {
        k8sWorkloads: jest.fn().mockReturnValue(k8sWorkloadsMock),
      },
    };

    return createMockApollo([], mockResolvers);
  };

  const createWrapper = (apolloProvider = createApolloProvider()) => {
    wrapper = shallowMountExtended(KubernetesSummary, {
      propsData: { configuration, namespace },
      apolloProvider,
      // GlTab/GlBadge are stubbed with the real components so their
      // rendered text can be asserted on.
      stubs: {
        GlTab,
        GlBadge,
      },
    });
  };

  describe('mounted', () => {
    it('renders summary tab', () => {
      createWrapper();

      expect(findTab().text()).toMatchInterpolatedText(`${KubernetesSummary.i18n.summaryTitle} 0`);
    });

    it('shows the loading icon', () => {
      createWrapper();

      expect(findLoadingIcon().exists()).toBe(true);
    });

    describe('when workloads data is loaded', () => {
      beforeEach(async () => {
        await createWrapper();
        await waitForPromises();
      });

      it('hides the loading icon when the list of workload types loaded', () => {
        expect(findLoadingIcon().exists()).toBe(false);
      });

      // Expected counts correspond to the entries in k8sWorkloadsMock;
      // `index` is the position of the workload type in the summary list.
      it.each`
        type              | successText    | successCount | failedCount | suspendedCount | index
        ${'Deployments'}  | ${'ready'}     | ${1}         | ${1}        | ${0}           | ${0}
        ${'DaemonSets'}   | ${'ready'}     | ${1}         | ${2}        | ${0}           | ${1}
        ${'StatefulSets'} | ${'ready'}     | ${2}         | ${1}        | ${0}           | ${2}
        ${'ReplicaSets'}  | ${'ready'}     | ${1}         | ${1}        | ${0}           | ${3}
        ${'Jobs'}         | ${'completed'} | ${2}         | ${1}        | ${0}           | ${4}
        ${'CronJobs'}     | ${'ready'}     | ${1}         | ${1}        | ${1}           | ${5}
      `(
        'populates view with the correct badges for workload type $type',
        ({ type, successText, successCount, failedCount, suspendedCount, index }) => {
          const findAllBadges = () => findSummaryListItem(index).findAllComponents(GlBadge);
          const findBadgeByVariant = (variant) =>
            findAllBadges().wrappers.find((badge) => badge.props('variant') === variant);

          expect(findSummaryListItem(index).text()).toContain(type);
          expect(findBadgeByVariant('success').text()).toBe(`${successCount} ${successText}`);
          expect(findBadgeByVariant('danger').text()).toBe(`${failedCount} failed`);

          // The neutral badge is only rendered for suspended workloads.
          if (suspendedCount > 0) {
            expect(findBadgeByVariant('neutral').text()).toBe(`${suspendedCount} suspended`);
          }
        },
      );
    });

    it('emits an error message when gets an error from the cluster_client API', async () => {
      const error = new Error('Error from the cluster_client API');
      const createErroredApolloProvider = () => {
        const mockResolvers = {
          Query: {
            k8sWorkloads: jest.fn().mockRejectedValueOnce(error),
          },
        };

        return createMockApollo([], mockResolvers);
      };

      createWrapper(createErroredApolloProvider());
      await waitForPromises();

      // The component surfaces resolver failures via a `cluster-error` event.
      expect(wrapper.emitted('cluster-error')).toEqual([[error]]);
    });
  });
});

View File

@ -1,12 +1,13 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlTabs, GlTab, GlTable, GlPagination } from '@gitlab/ui';
import { GlLoadingIcon, GlTabs, GlTab, GlTable, GlPagination, GlBadge } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { stubComponent } from 'helpers/stub_component';
import { useFakeDate } from 'helpers/fake_date';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import KubernetesTabs from '~/environments/components/kubernetes_tabs.vue';
import KubernetesSummary from '~/environments/components/kubernetes_summary.vue';
import { SERVICES_LIMIT_PER_PAGE } from '~/environments/constants';
import { mockKasTunnelUrl } from './mock_data';
import { k8sServicesMock } from './graphql/mock_data';
@ -16,6 +17,7 @@ Vue.use(VueApollo);
describe('~/environments/components/kubernetes_tabs.vue', () => {
let wrapper;
const namespace = 'my-kubernetes-namespace';
const configuration = {
basePath: mockKasTunnelUrl,
baseOptions: {
@ -25,9 +27,10 @@ describe('~/environments/components/kubernetes_tabs.vue', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findTabs = () => wrapper.findComponent(GlTabs);
const findTab = (at) => wrapper.findAllComponents(GlTab).at(at);
const findTab = () => wrapper.findComponent(GlTab);
const findTable = () => wrapper.findComponent(GlTable);
const findPagination = () => wrapper.findComponent(GlPagination);
const findKubernetesSummary = () => wrapper.findComponent(KubernetesSummary);
const createApolloProvider = () => {
const mockResolvers = {
@ -40,14 +43,15 @@ describe('~/environments/components/kubernetes_tabs.vue', () => {
};
const createWrapper = (apolloProvider = createApolloProvider()) => {
wrapper = shallowMount(KubernetesTabs, {
propsData: { configuration },
wrapper = shallowMountExtended(KubernetesTabs, {
propsData: { configuration, namespace },
apolloProvider,
stubs: {
GlTab,
GlTable: stubComponent(GlTable, {
props: ['items', 'per-page'],
}),
GlBadge,
},
});
};
@ -59,10 +63,16 @@ describe('~/environments/components/kubernetes_tabs.vue', () => {
expect(findTabs().exists()).toBe(true);
});
it('renders summary tab', () => {
createWrapper();
expect(findKubernetesSummary().props()).toEqual({ namespace, configuration });
});
it('renders services tab', () => {
createWrapper();
expect(findTab(0).text()).toMatchInterpolatedText(`${KubernetesTabs.i18n.servicesTitle} 0`);
expect(findTab().text()).toMatchInterpolatedText(`${KubernetesTabs.i18n.servicesTitle} 0`);
});
});

View File

@ -2,12 +2,13 @@
require 'spec_helper'
RSpec.describe Types::UserPreferencesType do
RSpec.describe Types::UserPreferencesType, feature_category: :user_profile do
specify { expect(described_class.graphql_name).to eq('UserPreferences') }
it 'exposes the expected fields' do
expected_fields = %i[
issues_sort
visibility_pipeline_id_type
]
expect(described_class).to have_graphql_fields(*expected_fields)

View File

@ -50,7 +50,7 @@ RSpec.describe GitlabSchema.types['User'], feature_category: :user_profile do
user_achievements
]
expect(described_class).to have_graphql_fields(*expected_fields)
expect(described_class).to include_graphql_fields(*expected_fields)
end
describe 'name field' do

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Types::VisibilityPipelineIdTypeEnum, feature_category: :user_profile do
  specify { expect(described_class.graphql_name).to eq('VisibilityPipelineIdType') }

  it 'exposes all visibility pipeline id types' do
    # The enum must mirror every UserPreference visibility pipeline id
    # type, upcased per GraphQL enum naming convention.
    expected_values = UserPreference.visibility_pipeline_id_types.keys.map(&:upcase)

    expect(described_class.values.keys).to match_array(expected_values)
  end
end

View File

@ -2,14 +2,14 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
:clean_gitlab_redis_cache, feature_category: :importers do
RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter,
:clean_gitlab_redis_cache, feature_category: :importers do
using RSpec::Parameterized::TableSyntax
let_it_be(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
let(:submitted_at) { Time.new(2017, 1, 1, 12, 00).utc }
let(:submitted_at) { Time.new(2017, 1, 1, 12).utc }
let(:client_double) do
instance_double(
'Gitlab::GithubImport::Client',
@ -21,7 +21,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
shared_examples 'imports a reviewer for the Merge Request' do
it 'creates reviewer for the Merge Request' do
expect { subject.execute }.to change(MergeRequestReviewer, :count).by(1)
expect { subject.execute }.to change { MergeRequestReviewer.count }.by(1)
expect(merge_request.reviewers).to contain_exactly(author)
end
@ -35,7 +35,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
end
it 'does not change Merge Request reviewers' do
expect { subject.execute }.not_to change(MergeRequestReviewer, :count)
expect { subject.execute }.not_to change { MergeRequestReviewer.count }
expect(merge_request.reviewers).to contain_exactly(author)
end
@ -48,7 +48,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
end
it 'does not change Merge Request reviewers', :aggregate_failures do
expect { subject.execute }.not_to change(MergeRequestReviewer, :count)
expect { subject.execute }.not_to change { MergeRequestReviewer.count }
expect(merge_request.reviewers).to contain_exactly(author)
end
@ -57,7 +57,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
shared_examples 'imports an approval for the Merge Request' do
it 'creates an approval for the Merge Request' do
expect { subject.execute }.to change(Approval, :count).by(1)
expect { subject.execute }.to change { Approval.count }.by(1)
expect(merge_request.approved_by_users.reload).to include(author)
expect(merge_request.approvals.last.created_at).to eq(submitted_at)
@ -75,7 +75,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it_behaves_like 'imports a reviewer for the Merge Request'
it 'creates a note for the review' do
expect { subject.execute }.to change(Note, :count).by(1)
expect { subject.execute }.to change { Note.count }.by(1)
last_note = merge_request.notes.last
expect(last_note.note).to eq('approved this merge request')
@ -91,8 +91,8 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'does not import second approve and note' do
expect { subject.execute }
.to change(Note, :count).by(0)
.and change(Approval, :count).by(0)
.to change { Note.count }.by(0)
.and change { Approval.count }.by(0)
end
end
end
@ -103,7 +103,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it_behaves_like 'imports a reviewer for the Merge Request'
it 'does not create note for the review' do
expect { subject.execute }.not_to change(Note, :count)
expect { subject.execute }.not_to change { Note.count }
end
end
@ -113,7 +113,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it_behaves_like 'imports a reviewer for the Merge Request'
it 'does not create a note for the review' do
expect { subject.execute }.not_to change(Note, :count)
expect { subject.execute }.not_to change { Note.count }
end
end
end
@ -126,7 +126,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it_behaves_like 'imports a reviewer for the Merge Request'
it 'creates a note for the review' do
expect { subject.execute }.to change(Note, :count).by(2)
expect { subject.execute }.to change { Note.count }.by(2)
note = merge_request.notes.where(system: false).last
expect(note.note).to eq("**Review:** Approved\n\nnote")
@ -146,7 +146,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'creates a note for the review' do
expect { subject.execute }
.to change(Note, :count).by(1)
.to change { Note.count }.by(1)
.and not_change(Approval, :count)
last_note = merge_request.notes.last
@ -162,7 +162,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'creates a note for the review' do
expect { subject.execute }
.to change(Note, :count).by(1)
.to change { Note.count }.by(1)
.and not_change(Approval, :count)
last_note = merge_request.notes.last
@ -182,7 +182,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'creates a note for the review with *Approved by by<author>*' do
expect { subject.execute }
.to change(Note, :count).by(1)
.to change { Note.count }.by(1)
last_note = merge_request.notes.last
expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Approved")
@ -195,7 +195,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
let(:review) { create_review(type: 'COMMENTED', note: '') }
it 'creates a note for the review with *Commented by<author>*' do
expect { subject.execute }.not_to change(Note, :count)
expect { subject.execute }.not_to change { Note.count }
end
end
@ -203,7 +203,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
let(:review) { create_review(type: 'CHANGES_REQUESTED', note: '') }
it 'creates a note for the review with *Changes requested by <author>*' do
expect { subject.execute }.not_to change(Note, :count)
expect { subject.execute }.not_to change { Note.count }
end
end
end
@ -213,7 +213,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'creates a note for the review without the author information' do
expect { subject.execute }
.to change(Note, :count).by(1)
.to change { Note.count }.by(1)
last_note = merge_request.notes.last
expect(last_note.note).to eq('**Review:** Approved')
@ -231,7 +231,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'creates a note for the review with the author username' do
expect { subject.execute }
.to change(Note, :count).by(1)
.to change { Note.count }.by(1)
last_note = merge_request.notes.last
expect(last_note.note).to eq("*Created by: author*\n\n**Review:** Approved")
expect(last_note.author).to eq(project.creator)
@ -243,7 +243,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
let(:review) { create_review(type: 'APPROVED', note: '', submitted_at: nil) }
it 'creates a note for the review without the author information' do
expect { subject.execute }.to change(Note, :count).by(1)
expect { subject.execute }.to change { Note.count }.by(1)
last_note = merge_request.notes.last
@ -258,7 +258,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'creates a note for the review with *Approved by by<author>*' do
expect { subject.execute }
.to change(Note, :count).by(1)
.to change { Note.count }.by(1)
last_note = merge_request.notes.last
@ -273,7 +273,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'creates a note for the review with *Commented by<author>*' do
expect { subject.execute }
.to change(Note, :count).by(1)
.to change { Note.count }.by(1)
last_note = merge_request.notes.last
@ -288,7 +288,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter,
it 'creates a note for the review with *Changes requested by <author>*' do
expect { subject.execute }
.to change(Note, :count).by(1)
.to change { Note.count }.by(1)
last_note = merge_request.notes.last

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewsImporter, feature_category: :importers do
let(:client) { double }
let(:project) { create(:project, import_source: 'github/repo') }
@ -15,13 +15,21 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
end
describe '#importer_class' do
it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::PullRequestReviewImporter) }
it { expect(subject.importer_class).to eq(Gitlab::GithubImport::Importer::PullRequests::ReviewImporter) }
end
describe '#sidekiq_worker_class' do
it { expect(subject.sidekiq_worker_class).to eq(Gitlab::GithubImport::PullRequests::ImportReviewWorker) }
end
describe '#collection_method' do
it { expect(subject.collection_method).to eq(:pull_request_reviews) }
end
describe '#object_type' do
it { expect(subject.object_type).to eq(:pull_request_review) }
end
describe '#id_for_already_imported_cache' do
it { expect(subject.id_for_already_imported_cache({ id: 1 })).to eq(1) }
end
@ -39,7 +47,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
let(:review) { { id: 1 } }
it 'fetches the pull requests reviews data' do
page = double(objects: [review], number: 1)
page = Struct.new(:objects, :number).new([review], 1)
expect(client)
.to receive(:each_page)
@ -50,7 +58,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
expect { |b| subject.each_object_to_import(&b) }
.to yield_with_args(review)
subject.each_object_to_import {}
subject.each_object_to_import
expect(review[:merge_request_id]).to eq(merge_request.id)
expect(review[:merge_request_iid]).to eq(merge_request.iid)
@ -68,7 +76,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
.exactly(:once) # ensure to be cached on the second call
.with(:pull_request_reviews, 'github/repo', merge_request.iid, { page: 2 })
subject.each_object_to_import {}
subject.each_object_to_import
end
it 'skips cached merge requests' do
@ -81,7 +89,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
expect(client).not_to receive(:each_page)
subject.each_object_to_import {}
subject.each_object_to_import
end
end
end

View File

@ -515,6 +515,7 @@ project:
- cluster_agents
- ci_access_project_authorizations
- cluster_project
- workspaces
- creator
- cycle_analytics_stages
- value_streams

View File

@ -63,6 +63,12 @@ RSpec.describe UserPreference, feature_category: :user_profile do
end
describe 'visibility_pipeline_id_type' do
it 'is set to 0 by default' do
pref = described_class.new
expect(pref.visibility_pipeline_id_type).to eq('id')
end
it { is_expected.to define_enum_for(:visibility_pipeline_id_type).with_values(id: 0, iid: 1) }
end
end

View File

@ -11,7 +11,8 @@ RSpec.describe Mutations::UserPreferences::Update, feature_category: :user_profi
let(:input) do
{
'issuesSort' => sort_value
'issuesSort' => sort_value,
'visibilityPipelineIdType' => 'IID'
}
end
@ -24,15 +25,20 @@ RSpec.describe Mutations::UserPreferences::Update, feature_category: :user_profi
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['userPreferences']['issuesSort']).to eq(sort_value)
expect(mutation_response['userPreferences']['visibilityPipelineIdType']).to eq('IID')
expect(current_user.user_preference.persisted?).to eq(true)
expect(current_user.user_preference.issues_sort).to eq(Types::IssueSortEnum.values[sort_value].value.to_s)
expect(current_user.user_preference.visibility_pipeline_id_type).to eq('iid')
end
end
context 'when user has existing preference' do
before do
current_user.create_user_preference!(issues_sort: Types::IssueSortEnum.values['TITLE_DESC'].value)
current_user.create_user_preference!(
issues_sort: Types::IssueSortEnum.values['TITLE_DESC'].value,
visibility_pipeline_id_type: 'id'
)
end
it 'updates the existing value' do
@ -42,8 +48,10 @@ RSpec.describe Mutations::UserPreferences::Update, feature_category: :user_profi
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['userPreferences']['issuesSort']).to eq(sort_value)
expect(mutation_response['userPreferences']['visibilityPipelineIdType']).to eq('IID')
expect(current_user.user_preference.issues_sort).to eq(Types::IssueSortEnum.values[sort_value].value.to_s)
expect(current_user.user_preference.visibility_pipeline_id_type).to eq('iid')
end
end
end

View File

@ -10,6 +10,12 @@ RSpec.describe 'User', feature_category: :user_profile do
shared_examples 'a working user query' do
it_behaves_like 'a working graphql query' do
before do
# TODO: This license stub is necessary because the remote development workspaces field
# defined in the EE version of UserInterface gets picked up here and thus the license
# check happens. This comes from the `ancestors` call in
# lib/graphql/schema/member/has_fields.rb#fields in the graphql library.
stub_licensed_features(remote_development: true)
post_graphql(query, current_user: current_user)
end
end

View File

@ -85,7 +85,7 @@ RSpec.describe Import::GithubFailureEntity, feature_category: :importers do
context 'with `pull_request_review` failure' do
it_behaves_like 'import failure entity' do
let(:source) { 'Gitlab::GithubImport::Importer::PullRequestReviewImporter' }
let(:source) { 'Gitlab::GithubImport::Importer::PullRequests::ReviewImporter' }
let(:title) { 'Pull request review 123456' }
let(:provider_url) { 'https://github.com/example/repo/pull/2#pullrequestreview-123456' }
let(:github_identifiers) do

View File

@ -639,7 +639,11 @@ module GraphqlHelpers
end
def expect_graphql_errors_to_be_empty
expect(flattened_errors).to be_empty
# TODO: using eq([]) instead of be_empty makes it print out the full error message including the
# raisedAt key which contains the full stacktrace. This is necessary to know where the
# unexpected error occurred during tests.
# This or an equivalent fix should be added in a separate MR on master.
expect(flattened_errors).to eq([])
end
# Helps migrate to the new GraphQL interpreter,

View File

@ -114,7 +114,7 @@ module LoginHelpers
def login_via(provider, user, uid, remember_me: false, additional_info: {})
mock_auth_hash(provider, uid, user.email, additional_info: additional_info)
visit new_user_session_path
expect(page).to have_content('Sign in with')
expect(page).to have_css('.omniauth-container')
check 'remember_me_omniauth' if remember_me

View File

@ -45,6 +45,10 @@ RSpec.shared_examples "a user type with merge request interaction type" do
user_achievements
]
# TODO: 'workspaces' needs to be included, but only when this spec is run in EE context, to account for the
# ee-only extension in ee/app/graphql/ee/types/user_interface.rb. Not sure how else to handle this.
expected_fields << 'workspaces' if Gitlab.ee?
expect(described_class).to have_graphql_fields(*expected_fields)
end

View File

@ -43,9 +43,9 @@ RSpec.describe 'admin/sessions/new.html.haml' do
it 'shows omniauth form' do
render
expect(rendered).to have_css('.omniauth-container')
expect(rendered).to have_content _('Sign in with')
expect(rendered).not_to have_content _('No authentication methods configured.')
expect(rendered).to have_content _('or')
expect(rendered).to have_css('.omniauth-container')
end
end

Some files were not shown because too many files have changed in this diff Show More