Add latest changes from gitlab-org/gitlab@master

parent f8d20b20be
commit ad9eb72915

CHANGELOG.md: 45 lines changed
@@ -4,7 +4,21 @@ entry.

## 13.2.3 (2020-08-05)

- No changes.

### Security (12 changes)

- Update kramdown gem to version 2.3.0.
- Enforce 2FA on Doorkeeper controllers.
- Revoke OAuth grants when a user revokes an application.
- Refresh project authorizations when transferring groups.
- Stop excess logs from failure to send invite email when group no longer exists.
- Verify confirmed email for OAuth Authorize POST endpoint.
- Fix XSS in Markdown reference tooltips.
- Fix XSS in milestone tooltips.
- Fix xss vulnerability on jobs view.
- Block 40-character hexadecimal branches.
- Prevent a temporary access escalation before group memberships are recalculated when specialized project share workers are enabled.
- Update GitLab Runner Helm Chart to 0.18.2.

## 13.2.2 (2020-07-29)
@@ -1035,7 +1049,20 @@ entry.

## 13.1.6 (2020-08-05)

- No changes.

### Security (11 changes)

- Add decompressed archive size validation on Project/Group Import. !562
- Enforce 2FA on Doorkeeper controllers.
- Refresh project authorizations when transferring groups.
- Stop excess logs from failure to send invite email when group no longer exists.
- Verify confirmed email for OAuth Authorize POST endpoint.
- Revoke OAuth grants when a user revokes an application.
- Fix XSS in Markdown reference tooltips.
- Fix XSS in milestone tooltips.
- Fix xss vulnerability on jobs view.
- Block 40-character hexadecimal branches.
- Update GitLab Runner Helm Chart to 0.17.2.

## 13.1.5 (2020-07-23)
@@ -1573,7 +1600,19 @@ entry.

## 13.0.12 (2020-08-05)

- No changes.

### Security (10 changes)

- Add decompressed archive size validation on Project/Group Import. !562
- Enforce 2FA on Doorkeeper controllers.
- Refresh project authorizations when transferring groups.
- Stop excess logs from failure to send invite email when group no longer exists.
- Verify confirmed email for OAuth Authorize POST endpoint.
- Revoke OAuth grants when a user revokes an application.
- Fix XSS in Markdown reference tooltips.
- Fix XSS in milestone tooltips.
- Fix xss vulnerability on jobs view.
- Block 40-character hexadecimal branches.

## 13.0.11 (2020-08-05)
@@ -1 +1 @@
c6fdcae2d1c5d4914a010dfe7ea5dbfcfb8bdabf
1bd1bfa6673eb784b856d580240f8e5522b86467
@@ -1 +1 @@
13.4.0
13.5.0
Gemfile: 2 lines changed

@@ -144,7 +144,7 @@ gem 'deckar01-task_list', '2.3.1'
gem 'gitlab-markup', '~> 1.7.1'
gem 'github-markup', '~> 1.7.0', require: 'github/markup'
gem 'commonmarker', '~> 0.20'
gem 'kramdown', '~> 2.2.1'
gem 'kramdown', '~> 2.3.0'
gem 'RedCloth', '~> 4.3.2'
gem 'rdoc', '~> 6.1.2'
gem 'org-ruby', '~> 0.9.12'

@@ -598,7 +598,7 @@ GEM
    kgio (2.11.3)
    knapsack (1.17.0)
      rake
    kramdown (2.2.1)
    kramdown (2.3.0)
      rexml
    kramdown-parser-gfm (1.1.0)
      kramdown (~> 2.0)

@@ -1308,7 +1308,7 @@ DEPENDENCIES
  jwt (~> 2.1.0)
  kaminari (~> 1.0)
  knapsack (~> 1.17)
  kramdown (~> 2.2.1)
  kramdown (~> 2.3.0)
  kubeclient (~> 4.6.0)
  letter_opener_web (~> 1.3.4)
  license_finder (~> 5.4)
@@ -1,11 +1,11 @@
<script>
import { GlDeprecatedButton, GlFormGroup, GlFormInput, GlFormCheckbox } from '@gitlab/ui';
import { GlButton, GlFormGroup, GlFormInput, GlFormCheckbox } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';
import Icon from '~/vue_shared/components/icon.vue';

export default {
  components: {
    GlDeprecatedButton,
    GlButton,
    GlFormCheckbox,
    GlFormGroup,
    GlFormInput,

@@ -58,7 +58,7 @@ export default {
      <h3 class="js-section-header h4">
        {{ s__('GrafanaIntegration|Grafana authentication') }}
      </h3>
      <gl-deprecated-button class="js-settings-toggle">{{ __('Expand') }}</gl-deprecated-button>
      <gl-button class="js-settings-toggle">{{ __('Expand') }}</gl-button>
      <p class="js-section-sub-header">
        {{ s__('GrafanaIntegration|Embed Grafana charts in GitLab issues.') }}
      </p>

@@ -93,9 +93,9 @@ export default {
          </a>
        </p>
      </gl-form-group>
      <gl-deprecated-button variant="success" @click="updateGrafanaIntegration">
      <gl-button variant="success" category="primary" @click="updateGrafanaIntegration">
        {{ __('Save Changes') }}
      </gl-deprecated-button>
      </gl-button>
    </form>
  </div>
</section>
@ -1,11 +1,15 @@
|
|||
<script>
|
||||
import { escape, isEmpty } from 'lodash';
|
||||
import { isEmpty } from 'lodash';
|
||||
import CiIcon from '~/vue_shared/components/ci_icon.vue';
|
||||
import { sprintf, __ } from '../../locale';
|
||||
import { __ } from '../../locale';
|
||||
import { GlSprintf, GlLink } from '@gitlab/ui';
|
||||
|
||||
export default {
|
||||
creatingEnvironment: 'creating',
|
||||
components: {
|
||||
CiIcon,
|
||||
GlSprintf,
|
||||
GlLink,
|
||||
},
|
||||
props: {
|
||||
deploymentStatus: {
|
||||
|
|
@ -31,7 +35,7 @@ export default {
|
|||
return this.outOfDateEnvironmentMessage();
|
||||
case 'failed':
|
||||
return this.failedEnvironmentMessage();
|
||||
case 'creating':
|
||||
case this.$options.creatingEnvironment:
|
||||
return this.creatingEnvironmentMessage();
|
||||
default:
|
||||
return '';
|
||||
|
|
@ -39,17 +43,12 @@ export default {
|
|||
},
|
||||
environmentLink() {
|
||||
if (this.hasEnvironment) {
|
||||
return sprintf(
|
||||
'%{startLink}%{name}%{endLink}',
|
||||
{
|
||||
startLink: `<a href="${this.deploymentStatus.environment.environment_path}" class="js-environment-link">`,
|
||||
name: escape(this.deploymentStatus.environment.name),
|
||||
endLink: '</a>',
|
||||
},
|
||||
false,
|
||||
);
|
||||
return {
|
||||
link: this.deploymentStatus.environment.environment_path,
|
||||
name: this.deploymentStatus.environment.name,
|
||||
};
|
||||
}
|
||||
return '';
|
||||
return {};
|
||||
},
|
||||
hasLastDeployment() {
|
||||
return this.hasEnvironment && this.deploymentStatus.environment.last_deployment;
|
||||
|
|
@ -74,201 +73,107 @@ export default {
|
|||
}
|
||||
|
||||
const { name, path } = this.deploymentCluster;
|
||||
const escapedName = escape(name);
|
||||
const escapedPath = escape(path);
|
||||
|
||||
if (!escapedPath) {
|
||||
return escapedName;
|
||||
}
|
||||
|
||||
return sprintf(
|
||||
'%{startLink}%{name}%{endLink}',
|
||||
{
|
||||
startLink: `<a href="${escapedPath}" class="js-job-cluster-link">`,
|
||||
name: escapedName,
|
||||
endLink: '</a>',
|
||||
},
|
||||
false,
|
||||
);
|
||||
return {
|
||||
path,
|
||||
name,
|
||||
};
|
||||
},
|
||||
kubernetesNamespace() {
|
||||
return this.hasCluster ? this.deploymentCluster.kubernetes_namespace : null;
|
||||
},
|
||||
deploymentLink() {
|
||||
return {
|
||||
path: this.lastDeploymentPath,
|
||||
name:
|
||||
this.deploymentStatus.status === this.$options.creatingEnvironment
|
||||
? __('latest deployment')
|
||||
: __('most recent deployment'),
|
||||
};
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
deploymentLink(name) {
|
||||
return sprintf(
|
||||
'%{startLink}%{name}%{endLink}',
|
||||
{
|
||||
startLink: `<a href="${this.lastDeploymentPath}" class="js-job-deployment-link">`,
|
||||
name,
|
||||
endLink: '</a>',
|
||||
},
|
||||
false,
|
||||
);
|
||||
},
|
||||
failedEnvironmentMessage() {
|
||||
const { environmentLink } = this;
|
||||
|
||||
return sprintf(
|
||||
__('The deployment of this job to %{environmentLink} did not succeed.'),
|
||||
{ environmentLink },
|
||||
false,
|
||||
);
|
||||
return __('The deployment of this job to %{environmentLink} did not succeed.');
|
||||
},
|
||||
lastEnvironmentMessage() {
|
||||
const { environmentLink, clusterNameOrLink, hasCluster, kubernetesNamespace } = this;
|
||||
if (hasCluster) {
|
||||
if (kubernetesNamespace) {
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is deployed to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink, kubernetesNamespace },
|
||||
false,
|
||||
if (this.hasCluster) {
|
||||
if (this.kubernetesNamespace) {
|
||||
return __(
|
||||
'This job is deployed to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
|
||||
);
|
||||
}
|
||||
// we know the cluster but not the namespace
|
||||
return sprintf(
|
||||
__('This job is deployed to %{environmentLink} using cluster %{clusterNameOrLink}.'),
|
||||
{ environmentLink, clusterNameOrLink },
|
||||
false,
|
||||
);
|
||||
return __('This job is deployed to %{environmentLink} using cluster %{clusterNameOrLink}.');
|
||||
}
|
||||
// not a cluster deployment
|
||||
return sprintf(__('This job is deployed to %{environmentLink}.'), { environmentLink }, false);
|
||||
return __('This job is deployed to %{environmentLink}.');
|
||||
},
|
||||
outOfDateEnvironmentMessage() {
|
||||
const {
|
||||
hasLastDeployment,
|
||||
hasCluster,
|
||||
environmentLink,
|
||||
clusterNameOrLink,
|
||||
kubernetesNamespace,
|
||||
} = this;
|
||||
|
||||
if (hasLastDeployment) {
|
||||
const deploymentLink = this.deploymentLink(__('most recent deployment'));
|
||||
if (hasCluster) {
|
||||
if (kubernetesNamespace) {
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}. View the %{deploymentLink}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink, kubernetesNamespace, deploymentLink },
|
||||
false,
|
||||
if (this.hasLastDeployment) {
|
||||
if (this.hasCluster) {
|
||||
if (this.kubernetesNamespace) {
|
||||
return __(
|
||||
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}. View the %{deploymentLink}.',
|
||||
);
|
||||
}
|
||||
// we know the cluster but not the namespace
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink}. View the %{deploymentLink}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink, deploymentLink },
|
||||
false,
|
||||
return __(
|
||||
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink}. View the %{deploymentLink}.',
|
||||
);
|
||||
}
|
||||
// not a cluster deployment
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is an out-of-date deployment to %{environmentLink}. View the %{deploymentLink}.',
|
||||
),
|
||||
{ environmentLink, deploymentLink },
|
||||
false,
|
||||
return __(
|
||||
'This job is an out-of-date deployment to %{environmentLink}. View the %{deploymentLink}.',
|
||||
);
|
||||
}
|
||||
// no last deployment, i.e. this is the first deployment
|
||||
if (hasCluster) {
|
||||
if (kubernetesNamespace) {
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink, kubernetesNamespace },
|
||||
false,
|
||||
if (this.hasCluster) {
|
||||
if (this.kubernetesNamespace) {
|
||||
return __(
|
||||
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
|
||||
);
|
||||
}
|
||||
// we know the cluster but not the namespace
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink },
|
||||
false,
|
||||
return __(
|
||||
'This job is an out-of-date deployment to %{environmentLink} using cluster %{clusterNameOrLink}.',
|
||||
);
|
||||
}
|
||||
// not a cluster deployment
|
||||
return sprintf(
|
||||
__('This job is an out-of-date deployment to %{environmentLink}.'),
|
||||
{ environmentLink },
|
||||
false,
|
||||
);
|
||||
return __('This job is an out-of-date deployment to %{environmentLink}.');
|
||||
},
|
||||
creatingEnvironmentMessage() {
|
||||
const {
|
||||
hasLastDeployment,
|
||||
hasCluster,
|
||||
environmentLink,
|
||||
clusterNameOrLink,
|
||||
kubernetesNamespace,
|
||||
} = this;
|
||||
|
||||
if (hasLastDeployment) {
|
||||
const deploymentLink = this.deploymentLink(__('latest deployment'));
|
||||
if (hasCluster) {
|
||||
if (kubernetesNamespace) {
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}. This will overwrite the %{deploymentLink}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink, kubernetesNamespace, deploymentLink },
|
||||
false,
|
||||
if (this.hasLastDeployment) {
|
||||
if (this.hasCluster) {
|
||||
if (this.kubernetesNamespace) {
|
||||
return __(
|
||||
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}. This will overwrite the %{deploymentLink}.',
|
||||
);
|
||||
}
|
||||
// we know the cluster but not the namespace
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink}. This will overwrite the %{deploymentLink}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink, deploymentLink },
|
||||
false,
|
||||
return __(
|
||||
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink}. This will overwrite the %{deploymentLink}.',
|
||||
);
|
||||
}
|
||||
// not a cluster deployment
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is creating a deployment to %{environmentLink}. This will overwrite the %{deploymentLink}.',
|
||||
),
|
||||
{ environmentLink, deploymentLink },
|
||||
false,
|
||||
return __(
|
||||
'This job is creating a deployment to %{environmentLink}. This will overwrite the %{deploymentLink}.',
|
||||
);
|
||||
}
|
||||
// no last deployment, i.e. this is the first deployment
|
||||
if (hasCluster) {
|
||||
if (kubernetesNamespace) {
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink, kubernetesNamespace },
|
||||
false,
|
||||
if (this.hasCluster) {
|
||||
if (this.kubernetesNamespace) {
|
||||
return __(
|
||||
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink} and namespace %{kubernetesNamespace}.',
|
||||
);
|
||||
}
|
||||
// we know the cluster but not the namespace
|
||||
return sprintf(
|
||||
__(
|
||||
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink}.',
|
||||
),
|
||||
{ environmentLink, clusterNameOrLink },
|
||||
false,
|
||||
return __(
|
||||
'This job is creating a deployment to %{environmentLink} using cluster %{clusterNameOrLink}.',
|
||||
);
|
||||
}
|
||||
// not a cluster deployment
|
||||
return sprintf(
|
||||
__('This job is creating a deployment to %{environmentLink}.'),
|
||||
{ environmentLink },
|
||||
false,
|
||||
);
|
||||
return __('This job is creating a deployment to %{environmentLink}.');
|
||||
},
|
||||
},
|
||||
};
|
||||
|
|
@ -277,7 +182,37 @@ export default {
|
|||
<div class="gl-mt-3 gl-mb-3 js-environment-container">
|
||||
<div class="environment-information">
|
||||
<ci-icon :status="iconStatus" />
|
||||
<p class="inline gl-mb-0" v-html="environment"></p>
|
||||
<p class="inline gl-mb-0">
|
||||
<gl-sprintf :message="environment">
|
||||
<template #environmentLink>
|
||||
<gl-link
|
||||
v-if="hasEnvironment"
|
||||
:href="environmentLink.link"
|
||||
data-testid="job-environment-link"
|
||||
v-text="environmentLink.name"
|
||||
/>
|
||||
</template>
|
||||
<template #clusterNameOrLink>
|
||||
<gl-link
|
||||
v-if="clusterNameOrLink.path"
|
||||
:href="clusterNameOrLink.path"
|
||||
data-testid="job-cluster-link"
|
||||
v-text="clusterNameOrLink.name"
|
||||
/>
|
||||
<template v-else>{{ clusterNameOrLink.name }}</template>
|
||||
</template>
|
||||
<template #kubernetesNamespace>
|
||||
<template>{{ kubernetesNamespace }}</template>
|
||||
</template>
|
||||
<template #deploymentLink>
|
||||
<gl-link
|
||||
:href="deploymentLink.path"
|
||||
data-testid="job-deployment-link"
|
||||
v-text="deploymentLink.name"
|
||||
/>
|
||||
</template>
|
||||
</gl-sprintf>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -1,10 +1,13 @@
|
|||
<script>
|
||||
import { GlLink } from '@gitlab/ui';
|
||||
import { GlAlert, GlBadge, GlLink } from '@gitlab/ui';
|
||||
import { s__ } from '../../locale';
|
||||
/**
|
||||
* Renders Stuck Runners block for job's view.
|
||||
*/
|
||||
export default {
|
||||
components: {
|
||||
GlAlert,
|
||||
GlBadge,
|
||||
GlLink,
|
||||
},
|
||||
props: {
|
||||
|
|
@ -22,40 +25,50 @@ export default {
|
|||
required: true,
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
hasNoRunnersWithCorrespondingTags() {
|
||||
return this.tags.length > 0;
|
||||
},
|
||||
stuckData() {
|
||||
if (this.hasNoRunnersWithCorrespondingTags) {
|
||||
return {
|
||||
text: s__(`Job|This job is stuck because you don't have
|
||||
any active runners online or available with any of these tags assigned to them:`),
|
||||
dataTestId: 'job-stuck-with-tags',
|
||||
showTags: true,
|
||||
};
|
||||
} else if (this.hasNoRunnersForProject) {
|
||||
return {
|
||||
text: s__(`Job|This job is stuck because the project
|
||||
doesn't have any runners online assigned to it.`),
|
||||
dataTestId: 'job-stuck-no-runners',
|
||||
showTags: false,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
text: s__(`Job|This job is stuck because you don't
|
||||
have any active runners that can run this job.`),
|
||||
dataTestId: 'job-stuck-no-active-runners',
|
||||
showTags: false,
|
||||
};
|
||||
},
|
||||
},
|
||||
};
|
||||
</script>
|
||||
<template>
|
||||
<div class="bs-callout bs-callout-warning">
|
||||
<p v-if="tags.length" class="gl-mb-0" data-testid="job-stuck-with-tags">
|
||||
{{
|
||||
s__(`This job is stuck because you don't have
|
||||
any active runners online or available with any of these tags assigned to them:`)
|
||||
}}
|
||||
<span
|
||||
v-for="(tag, index) in tags"
|
||||
:key="index"
|
||||
class="badge badge-primary gl-mr-2"
|
||||
data-testid="badge"
|
||||
>
|
||||
{{ tag }}
|
||||
</span>
|
||||
<gl-alert variant="warning" :dismissible="false">
|
||||
<p class="gl-mb-0" :data-testid="stuckData.dataTestId">
|
||||
{{ stuckData.text }}
|
||||
<template v-if="stuckData.showTags">
|
||||
<gl-badge v-for="tag in tags" :key="tag" variant="info">
|
||||
{{ tag }}
|
||||
</gl-badge>
|
||||
</template>
|
||||
</p>
|
||||
<p v-else-if="hasNoRunnersForProject" class="gl-mb-0" data-testid="job-stuck-no-runners">
|
||||
{{
|
||||
s__(`Job|This job is stuck because the project
|
||||
doesn't have any runners online assigned to it.`)
|
||||
}}
|
||||
</p>
|
||||
<p v-else class="gl-mb-0" data-testid="job-stuck-no-active-runners">
|
||||
{{
|
||||
s__(`This job is stuck because you don't
|
||||
have any active runners that can run this job.`)
|
||||
}}
|
||||
</p>
|
||||
|
||||
{{ __('Go to project') }}
|
||||
<gl-link v-if="runnersPath" :href="runnersPath">
|
||||
{{ __('CI settings') }}
|
||||
</gl-link>
|
||||
</div>
|
||||
</gl-alert>
|
||||
</template>
|
||||
|
|
|
|||
|
|
@ -68,6 +68,11 @@ export default {
|
|||
snippetHasBinary() {
|
||||
return Boolean(this.snippet.blobs.find(blob => blob.binary));
|
||||
},
|
||||
authoredMessage() {
|
||||
return this.snippet.author
|
||||
? __('Authored %{timeago} by %{author}')
|
||||
: __('Authored %{timeago}');
|
||||
},
|
||||
personalSnippetActions() {
|
||||
return [
|
||||
{
|
||||
|
|
@ -178,8 +183,8 @@ export default {
|
|||
</span>
|
||||
<gl-icon :name="visibilityLevelIcon" :size="14" />
|
||||
</div>
|
||||
<div class="creator">
|
||||
<gl-sprintf :message="__('Authored %{timeago} by %{author}')">
|
||||
<div class="creator" data-testid="authored-message">
|
||||
<gl-sprintf :message="authoredMessage">
|
||||
<template #timeago>
|
||||
<time-ago-tooltip
|
||||
:time="snippet.createdAt"
|
||||
|
|
|
|||
|
|
@@ -12,10 +12,17 @@ module EnforcesTwoFactorAuthentication

  included do
    before_action :check_two_factor_requirement
    helper_method :two_factor_grace_period_expired?, :two_factor_skippable?

    # to include this in controllers inheriting from `ActionController::Metal`
    # we need to add this block
    if respond_to?(:helper_method)
      helper_method :two_factor_grace_period_expired?, :two_factor_skippable?
    end
  end

  def check_two_factor_requirement
    return unless respond_to?(:current_user)

    if two_factor_authentication_required? && current_user_requires_two_factor?
      redirect_to profile_two_factor_auth_path
    end
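The `respond_to?(:helper_method)` guard is needed because `helper_method` is only defined for controllers that include `AbstractController::Helpers`; a slim controller that is not a full `ActionController::Base` subclass (Doorkeeper's token endpoints are in this category) would otherwise raise `NoMethodError` when including the concern. A minimal sketch with hypothetical class names:

```ruby
# Hypothetical metal-style controller: before_action is available via
# AbstractController::Callbacks, but helper_method is not defined, so the
# concern's guarded helper_method call is simply skipped.
class SlimTokenEndpoint < ActionController::Metal
  include AbstractController::Callbacks
  include EnforcesTwoFactorAuthentication
end

# A conventional controller still registers the view helpers as before.
class SettingsController < ApplicationController
  include EnforcesTwoFactorAuthentication
end
```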
@@ -2,7 +2,6 @@

class Oauth::ApplicationsController < Doorkeeper::ApplicationsController
  include Gitlab::GonHelper
  include Gitlab::Allowable
  include PageLayoutHelper
  include OauthApplications
  include Gitlab::Experimentation::ControllerConcern

@@ -19,8 +18,6 @@ class Oauth::ApplicationsController < Doorkeeper::ApplicationsController

  around_action :set_locale

  helper_method :can?

  layout 'profile'

  def index
@@ -4,7 +4,7 @@ class Oauth::AuthorizationsController < Doorkeeper::AuthorizationsController
  include Gitlab::Experimentation::ControllerConcern
  include InitializesCurrentUserMode

  before_action :verify_confirmed_email!, only: [:new]
  before_action :verify_confirmed_email!

  layout 'profile'
@@ -16,7 +16,7 @@ class Oauth::AuthorizedApplicationsController < Doorkeeper::AuthorizedApplicatio
    if params[:token_id].present?
      current_resource_owner.oauth_authorized_tokens.find(params[:token_id]).revoke
    else
      Doorkeeper::AccessToken.revoke_all_for(params[:id], current_resource_owner)
      Doorkeeper::Application.revoke_tokens_and_grants_for(params[:id], current_resource_owner)
    end

    redirect_to applications_profile_url,
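This one-line swap is the "Revoke OAuth grants when a user revokes an application" fix from the changelog: the new Doorkeeper call revokes outstanding authorization grants as well as access tokens. A sketch of the difference, with illustrative variable names:

```ruby
# Before: only issued access tokens were revoked; an authorization grant that
# had not yet been exchanged could still be turned into a fresh token.
Doorkeeper::AccessToken.revoke_all_for(application_id, resource_owner)

# After: access tokens and pending grants are both revoked, so removing the
# application really cuts off all access.
Doorkeeper::Application.revoke_tokens_and_grants_for(application_id, resource_owner)
```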
@@ -1,6 +1,8 @@
# frozen_string_literal: true

class Oauth::TokenInfoController < Doorkeeper::TokenInfoController
  include EnforcesTwoFactorAuthentication

  def show
    if doorkeeper_token && doorkeeper_token.accessible?
      token_json = doorkeeper_token.as_json
@@ -0,0 +1,5 @@
# frozen_string_literal: true

class Oauth::TokensController < Doorkeeper::TokensController
  include EnforcesTwoFactorAuthentication
end
@@ -73,9 +73,8 @@ module Repositories
    # rubocop: enable CodeReuse/ActiveRecord

    def create_file!(oid, size)
      uploaded_file = UploadedFile.from_params(
        params, :file, LfsObjectUploader.workhorse_local_upload_path)
      return unless uploaded_file
      uploaded_file = params[:file]
      return unless uploaded_file.is_a?(UploadedFile)

      LfsObject.create!(oid: oid, size: size, file: uploaded_file)
    end
@@ -29,7 +29,7 @@ module IssuablesHelper
  def sidebar_milestone_tooltip_label(milestone)
    return _('Milestone') unless milestone.present?

    [milestone[:title], sidebar_milestone_remaining_days(milestone) || _('Milestone')].join('<br/>')
    [escape_once(milestone[:title]), sidebar_milestone_remaining_days(milestone) || _('Milestone')].join('<br/>')
  end

  def sidebar_milestone_remaining_days(milestone)
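Escaping the milestone title is the "Fix XSS in milestone tooltips" change from the changelog: the title is user-controlled and the joined string is rendered as HTML in the tooltip. A quick illustration from a Rails console (hypothetical milestone title), using `ERB::Util.html_escape_once`, which backs the `escape_once` view helper:

```ruby
title = '<img src=x onerror=alert(1)>'          # hypothetical hostile milestone title

ERB::Util.html_escape_once(title)
# => "&lt;img src=x onerror=alert(1)&gt;"       # rendered as text, not executed

ERB::Util.html_escape_once('Launch &amp; Ship')
# => "Launch &amp; Ship"                        # already-escaped entities are left alone
```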
@ -13,6 +13,8 @@ module Emails
|
|||
@member_source_type = member_source_type
|
||||
@member_id = member_id
|
||||
|
||||
return unless member_exists?
|
||||
|
||||
user = User.find(recipient_id)
|
||||
|
||||
member_email_with_layout(
|
||||
|
|
@ -24,6 +26,8 @@ module Emails
|
|||
@member_source_type = member_source_type
|
||||
@member_id = member_id
|
||||
|
||||
return unless member_exists?
|
||||
|
||||
member_email_with_layout(
|
||||
to: member.user.notification_email_for(notification_group),
|
||||
subject: subject("Access to the #{member_source.human_name} #{member_source.model_name.singular} was granted"))
|
||||
|
|
@ -45,6 +49,8 @@ module Emails
|
|||
@member_id = member_id
|
||||
@token = token
|
||||
|
||||
return unless member_exists?
|
||||
|
||||
member_email_with_layout(
|
||||
to: member.invite_email,
|
||||
subject: subject("Invitation to join the #{member_source.human_name} #{member_source.model_name.singular}"))
|
||||
|
|
@ -53,6 +59,8 @@ module Emails
|
|||
def member_invite_accepted_email(member_source_type, member_id)
|
||||
@member_source_type = member_source_type
|
||||
@member_id = member_id
|
||||
|
||||
return unless member_exists?
|
||||
return unless member.created_by
|
||||
|
||||
member_email_with_layout(
|
||||
|
|
@ -74,9 +82,11 @@ module Emails
|
|||
subject: subject('Invitation declined'))
|
||||
end
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
def member
|
||||
@member ||= Member.find(@member_id)
|
||||
@member ||= Member.find_by(id: @member_id)
|
||||
end
|
||||
# rubocop: enable CodeReuse/ActiveRecord
|
||||
|
||||
def member_source
|
||||
@member_source ||= member.source
|
||||
|
|
@ -88,6 +98,11 @@ module Emails
|
|||
|
||||
private
|
||||
|
||||
def member_exists?
|
||||
Gitlab::AppLogger.info("Tried to send an email invitation for a deleted group. Member id: #{@member_id}") if member.blank?
|
||||
member.present?
|
||||
end
|
||||
|
||||
def member_source_class
|
||||
@member_source_type.classify.constantize
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -3,7 +3,7 @@
module Clusters
  module Applications
    class Runner < ApplicationRecord
      VERSION = '0.19.1'
      VERSION = '0.19.2'

      self.table_name = 'clusters_applications_runners'
@ -218,6 +218,24 @@ module Clusters
|
|||
provider&.status_name || connection_status.presence || :created
|
||||
end
|
||||
|
||||
def connection_error
|
||||
with_reactive_cache do |data|
|
||||
data[:connection_error]
|
||||
end
|
||||
end
|
||||
|
||||
def node_connection_error
|
||||
with_reactive_cache do |data|
|
||||
data[:node_connection_error]
|
||||
end
|
||||
end
|
||||
|
||||
def metrics_connection_error
|
||||
with_reactive_cache do |data|
|
||||
data[:metrics_connection_error]
|
||||
end
|
||||
end
|
||||
|
||||
def connection_status
|
||||
with_reactive_cache do |data|
|
||||
data[:connection_status]
|
||||
|
|
@ -233,9 +251,7 @@ module Clusters
|
|||
def calculate_reactive_cache
|
||||
return unless enabled?
|
||||
|
||||
gitlab_kubernetes_nodes = Gitlab::Kubernetes::Node.new(self)
|
||||
|
||||
{ connection_status: retrieve_connection_status, nodes: gitlab_kubernetes_nodes.all.presence }
|
||||
connection_data.merge(Gitlab::Kubernetes::Node.new(self).all)
|
||||
end
|
||||
|
||||
def persisted_applications
|
||||
|
|
@ -395,9 +411,10 @@ module Clusters
|
|||
@instance_domain ||= Gitlab::CurrentSettings.auto_devops_domain
|
||||
end
|
||||
|
||||
def retrieve_connection_status
|
||||
def connection_data
|
||||
result = ::Gitlab::Kubernetes::KubeClient.graceful_request(id) { kubeclient.core_client.discover }
|
||||
result[:status]
|
||||
|
||||
{ connection_status: result[:status], connection_error: result[:connection_error] }.compact
|
||||
end
|
||||
|
||||
# To keep backward compatibility with AUTO_DEVOPS_DOMAIN
|
||||
|
|
|
|||
|
|
@ -20,4 +20,8 @@ class ClusterEntity < Grape::Entity
|
|||
expose :gitlab_managed_apps_logs_path do |cluster|
|
||||
Clusters::ClusterPresenter.new(cluster, current_user: request.current_user).gitlab_managed_apps_logs_path # rubocop: disable CodeReuse/Presenter
|
||||
end
|
||||
|
||||
expose :kubernetes_errors do |cluster|
|
||||
ClusterErrorEntity.new(cluster)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class ClusterErrorEntity < Grape::Entity
|
||||
expose :connection_error
|
||||
expose :metrics_connection_error
|
||||
expose :node_connection_error
|
||||
end
|
||||
|
|
@ -11,6 +11,7 @@ class ClusterSerializer < BaseSerializer
|
|||
:enabled,
|
||||
:environment_scope,
|
||||
:gitlab_managed_apps_logs_path,
|
||||
:kubernetes_errors,
|
||||
:name,
|
||||
:nodes,
|
||||
:path,
|
||||
|
|
|
|||
|
|
@ -6,9 +6,10 @@ module AuthorizedProjectUpdate
|
|||
|
||||
BATCH_SIZE = 1000
|
||||
|
||||
def initialize(project, group)
|
||||
def initialize(project, group, group_access = nil)
|
||||
@project = project
|
||||
@group = group
|
||||
@group_access = group_access
|
||||
end
|
||||
|
||||
def execute
|
||||
|
|
@ -19,19 +20,20 @@ module AuthorizedProjectUpdate
|
|||
user_ids_to_delete = []
|
||||
|
||||
members.each do |member|
|
||||
new_access_level = access_level(member.access_level)
|
||||
existing_access_level = existing_authorizations[member.user_id]
|
||||
|
||||
if existing_access_level
|
||||
# User might already have access to the project unrelated to the
|
||||
# current project share
|
||||
next if existing_access_level >= member.access_level
|
||||
next if existing_access_level >= new_access_level
|
||||
|
||||
user_ids_to_delete << member.user_id
|
||||
end
|
||||
|
||||
authorizations_to_create << { user_id: member.user_id,
|
||||
project_id: project.id,
|
||||
access_level: member.access_level }
|
||||
access_level: new_access_level }
|
||||
end
|
||||
|
||||
update_authorizations(user_ids_to_delete, authorizations_to_create)
|
||||
|
|
@ -42,7 +44,15 @@ module AuthorizedProjectUpdate
|
|||
|
||||
private
|
||||
|
||||
attr_reader :project, :group
|
||||
attr_reader :project, :group, :group_access
|
||||
|
||||
def access_level(membership_access_level)
|
||||
return membership_access_level unless group_access
|
||||
|
||||
# access level must not be higher than the max access level set when
|
||||
# creating the project share
|
||||
[membership_access_level, group_access].min
|
||||
end
|
||||
|
||||
def existing_project_authorizations(members)
|
||||
user_ids = members.map(&:user_id)
|
||||
|
|
|
|||
|
|
@@ -37,6 +37,7 @@ module Groups

    # Overridden in EE
    def post_update_hooks(updated_project_ids)
      refresh_project_authorizations
    end

    def ensure_allowed_transfer

@@ -136,6 +137,16 @@ module Groups
      @group.add_owner(current_user)
    end

    def refresh_project_authorizations
      ProjectAuthorization.where(project_id: @group.all_projects.select(:id)).delete_all # rubocop: disable CodeReuse/ActiveRecord

      # refresh authorized projects for current_user immediately
      current_user.refresh_authorized_projects

      # schedule refreshing projects for all the members of the group
      @group.refresh_members_authorized_projects
    end

    def raise_transfer_error(message)
      raise TransferError, localized_error_messages[message]
    end
@@ -13,7 +13,7 @@ module Projects
      )

      if link.save
        setup_authorizations(group)
        setup_authorizations(group, link.group_access)
        success(link: link)
      else
        error(link.errors.full_messages.to_sentence, 409)

@@ -22,9 +22,10 @@ module Projects

    private

    def setup_authorizations(group)
    def setup_authorizations(group, group_access = nil)
      if Feature.enabled?(:specialized_project_authorization_project_share_worker)
        AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(project.id, group.id)
        AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker.perform_async(
          project.id, group.id, group_access)

        # AuthorizedProjectsWorker uses an exclusive lease per user but
        # specialized workers might have synchronization issues. Until we
@ -15,7 +15,7 @@
|
|||
"value": ""
|
||||
},
|
||||
{
|
||||
"field" : "SECURE_ANALYZER_IMAGE_TAG",
|
||||
"field" : "SAST_ANALYZER_IMAGE_TAG",
|
||||
"label" : "Image tag",
|
||||
"type": "string",
|
||||
"options": [],
|
||||
|
|
|
|||
|
|
@@ -10,12 +10,13 @@ module AuthorizedProjectUpdate

    idempotent!

    def perform(project_id, group_id)
    def perform(project_id, group_id, group_access = nil)
      project = Project.find(project_id)
      group = Group.find(group_id)

      AuthorizedProjectUpdate::ProjectGroupLinkCreateService.new(project, group)
        .execute
      AuthorizedProjectUpdate::ProjectGroupLinkCreateService
        .new(project, group, group_access)
        .execute
    end
  end
end
@ -16,6 +16,9 @@ class AuthorizedProjectsWorker
|
|||
if Rails.env.test?
|
||||
def self.bulk_perform_and_wait(args_list, timeout: 10)
|
||||
end
|
||||
|
||||
def self.bulk_perform_inline(args_list)
|
||||
end
|
||||
end
|
||||
|
||||
# rubocop: disable CodeReuse/ActiveRecord
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,5 @@
---
title: Use the uploaded file set by middleware in Repositories::LfsStorageController
merge_request: 38167
author:
type: changed

@@ -0,0 +1,5 @@
---
title: Display authored message correctly on public snippets viewed by unauthenticated users
merge_request: 38614
author:
type: fixed

@@ -0,0 +1,5 @@
---
title: Change the job stuck page to use UI library components
merge_request: 38618
author:
type: changed

@@ -0,0 +1,5 @@
---
title: Add dashboard_path to PrometheusMetric
merge_request: 38237
author:
type: added

@@ -0,0 +1,5 @@
---
title: Update gitlab-shell to v13.5.0
merge_request: 38720
author:
type: added

@@ -0,0 +1,5 @@
---
title: Update GitLab Runner Helm Chart to 0.19.2
merge_request:
author:
type: security
@@ -25,7 +25,8 @@ Rails.application.routes.draw do
    controllers applications: 'oauth/applications',
                authorized_applications: 'oauth/authorized_applications',
                authorizations: 'oauth/authorizations',
                token_info: 'oauth/token_info'
                token_info: 'oauth/token_info',
                tokens: 'oauth/tokens'
  end

  # This prefixless path is required because Jira gets confused if we set it up with a path
@@ -0,0 +1,14 @@
# frozen_string_literal: true

class AddDashboardPathToPrometheusMetrics < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  def up
    # Text limit is added in 20200730210506_add_text_limit_to_dashboard_path
    add_column :prometheus_metrics, :dashboard_path, :text # rubocop:disable Migration/AddLimitToTextColumns
  end

  def down
    remove_column :prometheus_metrics, :dashboard_path
  end
end

@@ -0,0 +1,17 @@
# frozen_string_literal: true

class AddTextLimitToDashboardPath < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    add_text_limit(:prometheus_metrics, :dashboard_path, 2048)
  end

  def down
    remove_text_limit(:prometheus_metrics, :dashboard_path)
  end
end

@@ -0,0 +1 @@
5f841d2032b55f01e944c50070a6bb102883c2e4da7ba155fdcf2e90f3b68707

@@ -0,0 +1 @@
85eb0a510cdb4b315aef1665f05ad3b93d5d39f4ddfe11ed5ddde63aa732f874
@@ -14657,7 +14657,9 @@ CREATE TABLE public.prometheus_metrics (
    created_at timestamp with time zone NOT NULL,
    updated_at timestamp with time zone NOT NULL,
    common boolean DEFAULT false NOT NULL,
    identifier character varying
    identifier character varying,
    dashboard_path text,
    CONSTRAINT check_0ad9f01463 CHECK ((char_length(dashboard_path) <= 2048))
);

CREATE SEQUENCE public.prometheus_metrics_id_seq
Binary file not shown (new image, 92 KiB).
@@ -14,7 +14,15 @@ For those new to the GitLab GraphQL API, see

- Get an [introduction to GraphQL from graphql.org](https://graphql.org/).
- GitLab supports a wide range of resources, listed in the [GraphQL API Reference](reference/index.md).

#### GraphiQL
### Examples

To work with sample queries that pull data from public projects on GitLab.com,
see the menu options in the left-hand
documentation menu, under API > GraphQL at `https://docs.gitlab.com/ee/api/graphql/`.

The [Getting started](getting_started.md) page includes different methods to customize GraphQL queries.

### GraphiQL

Explore the GraphQL API using the interactive [GraphiQL explorer](https://gitlab.com/-/graphql-explorer),
or on your self-managed GitLab instance on
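The same kind of query can also be sent directly to the `/api/graphql` endpoint rather than through GraphiQL. A minimal sketch in plain Ruby (public project data, so no token is attached; authenticated requests would add a personal access token header):

```ruby
require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.com/api/graphql')
query = '{ project(fullPath: "gitlab-org/gitlab-docs") { name forksCount } }'

# POST the query as JSON and print the decoded project fields.
response = Net::HTTP.post(uri, { query: query }.to_json, 'Content-Type' => 'application/json')
puts JSON.parse(response.body).dig('data', 'project')
```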
@ -0,0 +1,44 @@
|
|||
# Identify issue boards with GraphQL
|
||||
|
||||
This page describes how you can use the GraphiQL explorer to identify
|
||||
existing issue boards in the `gitlab-docs` documentation repository.
|
||||
|
||||
## Set up the GraphiQL explorer
|
||||
|
||||
This procedure presents a substantive example that you can copy and paste into your own
|
||||
instance of the [GraphiQL explorer](https://gitlab.com/-/graphql-explorer):
|
||||
|
||||
1. Copy the following code excerpt:
|
||||
|
||||
```graphql
|
||||
query {
|
||||
project(fullPath: "gitlab-org/gitlab-docs") {
|
||||
name
|
||||
forksCount
|
||||
statistics {
|
||||
wikiSize
|
||||
}
|
||||
issuesEnabled
|
||||
boards {
|
||||
nodes {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
1. Open the [GraphiQL Explorer](https://gitlab.com/-/graphql-explorer) page.
|
||||
1. Paste the `query` listed above into the left window of your GraphiQL explorer tool.
|
||||
1. Click Play to get the result shown here:
|
||||
|
||||

|
||||
|
||||
If you want to view one of these boards, take one of the numeric identifiers shown in the output. From the screenshot, the first identifier is `105011`. Navigate to the following URL, which includes the identifier:
|
||||
|
||||
```markdown
|
||||
https://gitlab.com/gitlab-org/gitlab-docs/-/boards/105011
|
||||
```
|
||||
|
||||
For more information on each attribute, see the [GraphQL API Resources](reference/index.md).
|
||||
|
|
@@ -3479,10 +3479,6 @@ job split into three separate jobs.

#### Parallel `matrix` jobs

> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/15356) in GitLab 13.3.
> - It's deployed behind a feature flag, disabled by default.
> - It's enabled on GitLab.com.
> - It can't be enabled or disabled per-project.
> - It's recommended for production use.

`matrix:` allows you to configure different variables for jobs that are running in parallel.
There can be from 2 to 50 jobs.
@ -104,13 +104,11 @@ Link each one to an appropriate place for more information.
|
|||
|
||||
## Instructions
|
||||
|
||||
"Instructions" is usually not the name of the heading.
|
||||
|
||||
This is the part of the document where you can include one or more sets of instructions,
|
||||
each to accomplish a specific task.
|
||||
This is the part of the document where you can include one or more sets of instructions.
|
||||
Each topic should help users accomplish a specific task.
|
||||
|
||||
Headers should describe the task the reader will achieve by following the instructions within,
|
||||
typically starting with a verb.
|
||||
typically starting with a verb. For example, `Create a package` or `Configure a pipeline`.
|
||||
|
||||
Larger instruction sets may have subsections covering specific phases of the process.
|
||||
Where appropriate, provide examples of code or configuration files to better clarify
|
||||
|
|
@ -127,6 +125,32 @@ intended usage.
|
|||
single heading like `## Configure X` for instructions when the feature
|
||||
is simple and the document is short.
|
||||
|
||||
Example topic:
|
||||
|
||||
## Create a teddy bear
|
||||
|
||||
Start by writing a sentence or two about _why_ someone would want to perform this task.
|
||||
It's not always possible, but is a good practice. For example:
|
||||
|
||||
Create a teddy bear when you need something to hug.
|
||||
|
||||
Follow this information with the task steps.
|
||||
|
||||
To create a teddy bear:
|
||||
|
||||
1. Go to **Settings > CI/CD**.
|
||||
1. Expand **This** and click **This**.
|
||||
1. Do another step.
|
||||
|
||||
After the numbered list, add a sentence with the expected result, if it
|
||||
is not obvious, and any next steps. For example:
|
||||
|
||||
The teddy bear is now in the kitchen, in the cupboard above the sink.
|
||||
|
||||
You can retrieve the teddy bear and put it on the couch with the other animals.
|
||||
|
||||
Screenshots are not necessary. They are difficult to keep up-to-date and can clutter the page.
|
||||
|
||||
<!-- ## Troubleshooting
|
||||
|
||||
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
|
||||
|
|
|
|||
|
|
@@ -13,6 +13,8 @@ For programmatic help adhering to the guidelines, see [Testing](index.md#testing
See the GitLab handbook for further [writing style guidelines](https://about.gitlab.com/handbook/communication/#writing-style-guidelines)
that apply to all GitLab content, not just documentation.

View [a list of recent style guide updates](https://gitlab.com/dashboard/merge_requests?scope=all&utf8=%E2%9C%93&state=merged&label_name[]=tw-style&not[label_name][]=docs%3A%3Afix).

## Documentation is the single source of truth (SSOT)

### Why a single source of truth

@@ -1522,43 +1524,43 @@ GitLab Community Edition), don't split the product or feature name across lines.

### Product badges

When a feature is available in EE-only tiers, add the corresponding tier according to the
feature availability:
When a feature is available in paid tiers, add the corresponding tier to the
header or other page element according to the feature's availability:

- For GitLab Core and GitLab.com Free: `**(CORE)**`.
- For GitLab Starter and GitLab.com Bronze: `**(STARTER)**`.
- For GitLab Premium and GitLab.com Silver: `**(PREMIUM)**`.
- For GitLab Ultimate and GitLab.com Gold: `**(ULTIMATE)**`.

| Tier in which feature is available                                      | Tier markup           |
|:------------------------------------------------------------------------|:----------------------|
| GitLab Core and GitLab.com Free, and their higher tiers                  | `**(CORE)**`          |
| GitLab Starter and GitLab.com Bronze, and their higher tiers             | `**(STARTER)**`       |
| GitLab Premium and GitLab.com Silver, and their higher tiers             | `**(PREMIUM)**`       |
| GitLab Ultimate and GitLab.com Gold                                      | `**(ULTIMATE)**`      |
| *Only* GitLab Core and higher tiers (no GitLab.com-based tiers)          | `**(CORE ONLY)**`     |
| *Only* GitLab Starter and higher tiers (no GitLab.com-based tiers)       | `**(STARTER ONLY)**`  |
| *Only* GitLab Premium and higher tiers (no GitLab.com-based tiers)       | `**(PREMIUM ONLY)**`  |
| *Only* GitLab Ultimate (no GitLab.com-based tiers)                       | `**(ULTIMATE ONLY)**` |
| *Only* GitLab.com Free and higher tiers (no self-managed instances)      | `**(FREE ONLY)**`     |
| *Only* GitLab.com Bronze and higher tiers (no self-managed instances)    | `**(BRONZE ONLY)**`   |
| *Only* GitLab.com Silver and higher tiers (no self-managed instances)    | `**(SILVER ONLY)**`   |
| *Only* GitLab.com Gold (no self-managed instances)                       | `**(GOLD ONLY)**`     |

To exclude GitLab.com tiers (when the feature is not available in GitLab.com), add the
keyword "only":
For clarity, all page title headers (H1s) must have a tier markup for
the lowest tier that has information on the documentation page.

- For GitLab Core: `**(CORE ONLY)**`.
- For GitLab Starter: `**(STARTER ONLY)**`.
- For GitLab Premium: `**(PREMIUM ONLY)**`.
- For GitLab Ultimate: `**(ULTIMATE ONLY)**`.
If sections of a page apply to higher tier levels, they can be separately
labeled with their own tier markup.

For GitLab.com only tiers (when the feature is not available for self-managed instances):
#### Product badge display behavior

- For GitLab Free and higher tiers: `**(FREE ONLY)**`.
- For GitLab Bronze and higher tiers: `**(BRONZE ONLY)**`.
- For GitLab Silver and higher tiers: `**(SILVER ONLY)**`.
- For GitLab Gold: `**(GOLD ONLY)**`.
When using the tier markup with headers, the documentation page will
display the full tier badge with the header line.

The tier should be ideally added to headers, so that the full badge will be displayed.
However, it can be also mentioned from paragraphs, list items, and table cells. For these cases,
the tier mention will be represented by an orange info icon **(information)** that will show the tiers on hover.
You can also use the tier markup with paragraphs, list items,
and table cells. For these cases, the tier mention will be represented by an
orange info icon **{information}** that will display the tiers when visitors
point to the icon. For example:

Use the lowest tier at the page level, even if higher-level tiers exist on the page. For example, you might have a page that is marked as Starter but a section badged as Premium.

For example:

- `**(STARTER)**` renders as **(STARTER)**
- `**(STARTER ONLY)**` renders as **(STARTER ONLY)**
- `**(SILVER ONLY)**` renders as **(SILVER ONLY)**

The absence of tiers' mentions mean that the feature is available in GitLab Core,
GitLab.com Free, and all higher tiers.
- `**(STARTER)**` displays as **(STARTER)**
- `**(STARTER ONLY)**` displays as **(STARTER ONLY)**
- `**(SILVER ONLY)**` displays as **(SILVER ONLY)**

#### How it works
@@ -59,6 +59,13 @@ If you have other target branches, include them in your regex. (See [Enabling pu
The default branch also defaults to being a [protected branch](../user/project/protected_branches.md),
which already limits users from pushing directly.

#### Default restricted branch names

> Introduced in GitLab 12.10.

By default, GitLab restricts certain formats of branch names for security purposes.
Currently 40-character hexadecimal names, similar to Git commit hashes, are prohibited.

### Custom Push Rules **(CORE ONLY)**

It's possible to create custom push rules rather than the push rules available in
@@ -0,0 +1,28 @@
---
type: reference, howto
---

# Project Import Decompressed Archive Size Limits

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/31564) in GitLab 13.2.

When using [Project Import](../user/project/settings/import_export.md), the size of the decompressed project archive is limited to 10 GB.

If the decompressed size exceeds this limit, a `Decompressed archive size validation failed` error is returned.

## Enable/disable size validation

Decompressed size validation is enabled by default.
If you have a project with a decompressed size exceeding this limit,
it is possible to disable the validation by turning off the
`validate_import_decompressed_archive_size` feature flag.

Start a [Rails console](../administration/troubleshooting/debug.md#starting-a-rails-console-session).

```ruby
# Disable
Feature.disable(:validate_import_decompressed_archive_size)

# Enable
Feature.enable(:validate_import_decompressed_archive_size)
```
@@ -87,7 +87,9 @@ Enter the further details on the page as described in the following table.
| `Password/API token` | Created in [configuring Jira](#configuring-jira) step. Use `password` for **Jira Server** or `API token` for **Jira Cloud**. |
| `Transition ID` | Required for closing Jira issues via commits or merge requests. This is the ID of a transition in Jira that moves issues to a desired state. (See [Obtaining a transition ID](#obtaining-a-transition-id).) If you insert multiple transition IDs separated by `,` or `;`, the issue is moved to each state, one after another, using the given order. |

To enable users to view Jira issues inside GitLab, select **Enable Jira issues** and enter a project key. **(PREMIUM)**
To enable users to view Jira issues inside the GitLab project, select **Enable Jira issues** and enter a Jira project key. **(PREMIUM)**

You can only display issues from a single Jira project within a given GitLab project.

CAUTION: **Caution:**
If you enable Jira issues with the setting above, all users that have access to this GitLab project will be able to view all issues from the specified Jira project.
@ -125,3 +125,5 @@ module Banzai
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
Banzai::Filter::LabelReferenceFilter.prepend_if_ee('EE::Banzai::Filter::LabelReferenceFilter')
|
||||
|
|
|
|||
|
|
@ -53,7 +53,6 @@ module Banzai
|
|||
attributes[:reference_type] ||= self.class.reference_type
|
||||
attributes[:container] ||= 'body'
|
||||
attributes[:placement] ||= 'top'
|
||||
attributes[:html] ||= 'true'
|
||||
attributes.delete(:original) if context[:no_original_data]
|
||||
attributes.map do |key, value|
|
||||
%Q(data-#{key.to_s.dasherize}="#{escape_once(value)}")
|
||||
|
|
|
|||
|
|
@@ -5,6 +5,8 @@
module Gitlab
  class BaseDoorkeeperController < ActionController::Base
    include Gitlab::Allowable
    include EnforcesTwoFactorAuthentication

    helper_method :can?
  end
end
@@ -12,7 +12,8 @@ module Gitlab
        push_protected_branch: 'You are not allowed to push code to protected branches on this project.',
        create_protected_branch: 'You are not allowed to create protected branches on this project.',
        invalid_commit_create_protected_branch: 'You can only use an existing protected branch ref as the basis of a new protected branch.',
        non_web_create_protected_branch: 'You can only create protected branches using the web interface and API.'
        non_web_create_protected_branch: 'You can only create protected branches using the web interface and API.',
        prohibited_hex_branch_name: 'You cannot create a branch with a 40-character hexadecimal branch name.'
      }.freeze

      LOG_MESSAGES = {

@@ -32,11 +33,20 @@ module Gitlab
          end
        end

        prohibited_branch_checks
        protected_branch_checks
      end

      private

      def prohibited_branch_checks
        return unless Feature.enabled?(:prohibit_hexadecimal_branch_names, project, default_enabled: true)

        if branch_name =~ /\A\h{40}\z/
          raise GitAccess::ForbiddenError, ERROR_MESSAGES[:prohibited_hex_branch_name]
        end
      end

      def protected_branch_checks
        logger.log_timed(LOG_MESSAGES[:protected_branch_checks]) do
          return unless ProtectedBranch.protected?(project, branch_name) # rubocop:disable Cop/AvoidReturnFromBlocks
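This is the "Block 40-character hexadecimal branches" change from the changelog: branch names that look exactly like a full commit SHA are rejected, presumably because such names are easily confused with real object IDs. A quick check of the pattern in plain Ruby (`\h` matches a hexadecimal digit):

```ruby
pattern = /\A\h{40}\z/

('a' * 40).match?(pattern)                                   # => true  (rejected)
'deadbeefdeadbeefdeadbeefdeadbeefdeadbeef'.match?(pattern)   # => true  (rejected)
'feature/fix-login'.match?(pattern)                          # => false (ordinary names pass)
```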
@ -10,7 +10,7 @@ module Gitlab
|
|||
module Product
|
||||
class Parallel < ::Gitlab::Config::Entry::Simplifiable
|
||||
strategy :ParallelBuilds, if: -> (config) { config.is_a?(Numeric) }
|
||||
strategy :MatrixBuilds, if: -> (config) { ::Gitlab::Ci::Features.parallel_matrix_enabled? && config.is_a?(Hash) }
|
||||
strategy :MatrixBuilds, if: -> (config) { config.is_a?(Hash) }
|
||||
|
||||
PARALLEL_LIMIT = 50
|
||||
|
||||
|
|
|
|||
|
|
@ -29,11 +29,7 @@ module Gitlab
|
|||
end
|
||||
|
||||
def strategies
|
||||
if ::Gitlab::Ci::Features.parallel_matrix_enabled?
|
||||
[NumberStrategy, MatrixStrategy]
|
||||
else
|
||||
[NumberStrategy]
|
||||
end
|
||||
[NumberStrategy, MatrixStrategy]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -62,10 +62,6 @@ module Gitlab
|
|||
::Feature.enabled?(:destroy_only_unlocked_expired_artifacts, default_enabled: false)
|
||||
end
|
||||
|
||||
def self.parallel_matrix_enabled?
|
||||
::Feature.enabled?(:ci_parallel_matrix_enabled)
|
||||
end
|
||||
|
||||
def self.bulk_insert_on_create?(project)
|
||||
::Feature.enabled?(:ci_bulk_insert_on_create, project, default_enabled: true)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,90 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'zlib'
|
||||
|
||||
module Gitlab
|
||||
module ImportExport
|
||||
class DecompressedArchiveSizeValidator
|
||||
include Gitlab::Utils::StrongMemoize
|
||||
|
||||
DEFAULT_MAX_BYTES = 10.gigabytes.freeze
|
||||
CHUNK_SIZE = 4096.freeze
|
||||
|
||||
attr_reader :error
|
||||
|
||||
def initialize(archive_path:, max_bytes: self.class.max_bytes)
|
||||
@archive_path = archive_path
|
||||
@max_bytes = max_bytes
|
||||
@bytes_read = 0
|
||||
@total_reads = 0
|
||||
@denominator = 5
|
||||
@error = nil
|
||||
end
|
||||
|
||||
def valid?
|
||||
strong_memoize(:valid) do
|
||||
validate
|
||||
end
|
||||
end
|
||||
|
||||
def self.max_bytes
|
||||
DEFAULT_MAX_BYTES
|
||||
end
|
||||
|
||||
def archive_file
|
||||
@archive_file ||= File.open(@archive_path)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def validate
|
||||
until archive_file.eof?
|
||||
compressed_chunk = archive_file.read(CHUNK_SIZE)
|
||||
|
||||
inflate_stream.inflate(compressed_chunk) do |chunk|
|
||||
@bytes_read += chunk.size
|
||||
@total_reads += 1
|
||||
end
|
||||
|
||||
# Start garbage collection every 5 reads in order
|
||||
# to prevent memory bloat during archive decompression
|
||||
GC.start if gc_start?
|
||||
|
||||
if @bytes_read > @max_bytes
|
||||
@error = error_message
|
||||
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
true
|
||||
rescue => e
|
||||
@error = error_message
|
||||
|
||||
Gitlab::ErrorTracking.track_exception(e)
|
||||
|
||||
Gitlab::Import::Logger.info(
|
||||
message: @error,
|
||||
error: e.message
|
||||
)
|
||||
|
||||
false
|
||||
ensure
|
||||
inflate_stream.close
|
||||
archive_file.close
|
||||
end
|
||||
|
||||
def inflate_stream
|
||||
@inflate_stream ||= Zlib::Inflate.new(Zlib::MAX_WBITS + 32)
|
||||
end
|
||||
|
||||
def gc_start?
|
||||
@total_reads % @denominator == 0
|
||||
end
|
||||
|
||||
def error_message
|
||||
_('Decompressed archive size validation failed.')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -28,6 +28,7 @@ module Gitlab
|
|||
copy_archive
|
||||
|
||||
wait_for_archived_file do
|
||||
validate_decompressed_archive_size if Feature.enabled?(:validate_import_decompressed_archive_size, default_enabled: true)
|
||||
decompress_archive
|
||||
end
|
||||
rescue => e
|
||||
|
|
@ -82,6 +83,14 @@ module Gitlab
|
|||
def extracted_files
|
||||
Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| IGNORED_FILENAMES.include?(File.basename(f)) }
|
||||
end
|
||||
|
||||
def validate_decompressed_archive_size
|
||||
raise ImporterError.new(size_validator.error) unless size_validator.valid?
|
||||
end
|
||||
|
||||
def size_validator
|
||||
@size_validator ||= DecompressedArchiveSizeValidator.new(archive_path: @archive_file)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -116,15 +116,15 @@ module Gitlab
      def self.graceful_request(cluster_id)
        { status: :connected, response: yield }
      rescue *Gitlab::Kubernetes::Errors::CONNECTION
        { status: :unreachable }
        { status: :unreachable, connection_error: :connection_error }
      rescue *Gitlab::Kubernetes::Errors::AUTHENTICATION
        { status: :authentication_failure }
        { status: :authentication_failure, connection_error: :authentication_error }
      rescue Kubeclient::HttpError => e
        { status: kubeclient_error_status(e.message) }
        { status: kubeclient_error_status(e.message), connection_error: :http_error }
      rescue => e
        Gitlab::ErrorTracking.track_exception(e, cluster_id: cluster_id)

        { status: :unknown_failure }
        { status: :unknown_failure, connection_error: :unknown_error }
      end

      # KubeClient uses the same error class
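A hedged sketch of how callers see the new connection_error key; `cluster` is assumed to be a cluster record with a configured kubeclient, and the return shapes are taken from the method above:

    # Illustrative only.
    result = Gitlab::Kubernetes::KubeClient.graceful_request(cluster.id) do
      cluster.kubeclient.get_nodes
    end

    # Success:          { status: :connected, response: <nodes> }
    # Connection error: { status: :unreachable, connection_error: :connection_error }
    result[:connection_error] # => nil, :connection_error, :authentication_error, :http_error or :unknown_error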
@@ -8,22 +8,29 @@ module Gitlab
      end

      def all
        nodes.map do |node|
          attributes = node(node)
          attributes.merge(node_metrics(node))
        end
        {
          nodes: metadata.presence,
          node_connection_error: nodes_from_cluster[:connection_error],
          metrics_connection_error: nodes_metrics_from_cluster[:connection_error]
        }.compact
      end

      private

      attr_reader :cluster

      def metadata
        nodes.map do |node|
          base_data(node).merge(node_metrics(node))
        end
      end

      def nodes_from_cluster
        graceful_request { cluster.kubeclient.get_nodes }
        @nodes_from_cluster ||= graceful_request { cluster.kubeclient.get_nodes }
      end

      def nodes_metrics_from_cluster
        graceful_request { cluster.kubeclient.metrics_client.get_nodes }
        @nodes_metrics_from_cluster ||= graceful_request { cluster.kubeclient.metrics_client.get_nodes }
      end

      def nodes

@@ -44,7 +51,7 @@ module Gitlab
        ::Gitlab::Kubernetes::KubeClient.graceful_request(cluster.id, &block)
      end

      def node(node)
      def base_data(node)
        {
          'metadata' => {
            'name' => node.metadata.name
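A short sketch of the shape #all now returns; the values are illustrative, while the keys and the compaction come from the hash built above:

    # Illustrative only; `cluster` is assumed.
    Gitlab::Kubernetes::Node.new(cluster).all
    # => { nodes: [...] }                                        when both queries succeed
    # => { nodes: [...], metrics_connection_error: :http_error } when only the metrics query fails
    # => { node_connection_error: :connection_error }            when nodes cannot be fetched (nil keys are compacted away)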
@@ -3,7 +3,7 @@
module Gitlab
  module MarkdownCache
    # Increment this number every time the renderer changes its output
    CACHE_COMMONMARK_VERSION = 23
    CACHE_COMMONMARK_VERSION = 24
    CACHE_COMMONMARK_VERSION_START = 10

    BaseError = Class.new(StandardError)
@@ -3482,6 +3482,9 @@ msgstr ""
msgid "Author: %{author_name}"
msgstr ""

msgid "Authored %{timeago}"
msgstr ""

msgid "Authored %{timeago} by %{author}"
msgstr ""

@@ -7650,6 +7653,9 @@ msgstr ""
msgid "Decline and sign out"
msgstr ""

msgid "Decompressed archive size validation failed."
msgstr ""

msgid "Default Branch"
msgstr ""

@@ -13629,6 +13635,12 @@ msgstr ""
msgid "Job|This job is stuck because the project doesn't have any runners online assigned to it."
msgstr ""

msgid "Job|This job is stuck because you don't have any active runners online or available with any of these tags assigned to them:"
msgstr ""

msgid "Job|This job is stuck because you don't have any active runners that can run this job."
msgstr ""

msgid "Job|for"
msgstr ""

@@ -24790,12 +24802,6 @@ msgstr ""
msgid "This job is preparing to start"
msgstr ""

msgid "This job is stuck because you don't have any active runners online or available with any of these tags assigned to them:"
msgstr ""

msgid "This job is stuck because you don't have any active runners that can run this job."
msgstr ""

msgid "This job is waiting for resource: "
msgstr ""
@@ -19,12 +19,29 @@ RSpec.describe Oauth::ApplicationsController do
    it { is_expected.to redirect_to(new_user_session_path) }
  end

  shared_examples 'redirects to 2fa setup page when the user requires it' do
    context 'when 2fa is set up on application level' do
      before do
        stub_application_setting(require_two_factor_authentication: true)
      end

      it { is_expected.to redirect_to(profile_two_factor_auth_path) }
    end

    context 'when 2fa is set up on group level' do
      let(:user) { create(:user, require_two_factor_authentication_from_group: true) }

      it { is_expected.to redirect_to(profile_two_factor_auth_path) }
    end
  end

  describe 'GET #new' do
    subject { get :new }

    it { is_expected.to have_gitlab_http_status(:ok) }

    it_behaves_like 'redirects to login page when the user is not signed in'
    it_behaves_like 'redirects to 2fa setup page when the user requires it'
  end

  describe 'DELETE #destroy' do

@@ -33,6 +50,7 @@ RSpec.describe Oauth::ApplicationsController do
    it { is_expected.to redirect_to(oauth_applications_url) }

    it_behaves_like 'redirects to login page when the user is not signed in'
    it_behaves_like 'redirects to 2fa setup page when the user requires it'
  end

  describe 'GET #edit' do

@@ -41,6 +59,7 @@ RSpec.describe Oauth::ApplicationsController do
    it { is_expected.to have_gitlab_http_status(:ok) }

    it_behaves_like 'redirects to login page when the user is not signed in'
    it_behaves_like 'redirects to 2fa setup page when the user requires it'
  end

  describe 'PUT #update' do

@@ -49,6 +68,7 @@ RSpec.describe Oauth::ApplicationsController do
    it { is_expected.to redirect_to(oauth_application_url(application)) }

    it_behaves_like 'redirects to login page when the user is not signed in'
    it_behaves_like 'redirects to 2fa setup page when the user requires it'
  end

  describe 'GET #show' do

@@ -57,6 +77,7 @@ RSpec.describe Oauth::ApplicationsController do
    it { is_expected.to have_gitlab_http_status(:ok) }

    it_behaves_like 'redirects to login page when the user is not signed in'
    it_behaves_like 'redirects to 2fa setup page when the user requires it'
  end

  describe 'GET #index' do

@@ -73,6 +94,7 @@ RSpec.describe Oauth::ApplicationsController do
    end

    it_behaves_like 'redirects to login page when the user is not signed in'
    it_behaves_like 'redirects to 2fa setup page when the user requires it'
  end

  describe 'POST #create' do

@@ -112,6 +134,7 @@ RSpec.describe Oauth::ApplicationsController do
    end

    it_behaves_like 'redirects to login page when the user is not signed in'
    it_behaves_like 'redirects to 2fa setup page when the user requires it'
  end
end

@@ -119,6 +142,10 @@ RSpec.describe Oauth::ApplicationsController do
  it 'current_user_mode available' do
    expect(subject.current_user_mode).not_to be_nil
  end

  it 'includes Two-factor enforcement concern' do
    expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
  end
end

describe 'locale' do
@ -3,6 +3,8 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Oauth::AuthorizationsController do
|
||||
let(:user) { create(:user, confirmed_at: confirmed_at) }
|
||||
let(:confirmed_at) { 1.hour.ago }
|
||||
let!(:application) { create(:oauth_application, scopes: 'api read_user', redirect_uri: 'http://example.com') }
|
||||
let(:params) do
|
||||
{
|
||||
|
|
@ -17,9 +19,45 @@ RSpec.describe Oauth::AuthorizationsController do
|
|||
sign_in(user)
|
||||
end
|
||||
|
||||
describe 'GET #new' do
|
||||
shared_examples 'OAuth Authorizations require confirmed user' do
|
||||
context 'when the user is confirmed' do
|
||||
let(:user) { create(:user) }
|
||||
context 'when there is already an access token for the application with a matching scope' do
|
||||
before do
|
||||
scopes = Doorkeeper::OAuth::Scopes.from_string('api')
|
||||
|
||||
allow(Doorkeeper.configuration).to receive(:scopes).and_return(scopes)
|
||||
|
||||
create(:oauth_access_token, application: application, resource_owner_id: user.id, scopes: scopes)
|
||||
end
|
||||
|
||||
it 'authorizes the request and redirects' do
|
||||
subject
|
||||
|
||||
expect(request.session['user_return_to']).to be_nil
|
||||
expect(response).to have_gitlab_http_status(:found)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the user is unconfirmed' do
|
||||
let(:confirmed_at) { nil }
|
||||
|
||||
it 'returns 200 and renders error view' do
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response).to render_template('doorkeeper/authorizations/error')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'GET #new' do
|
||||
subject { get :new, params: params }
|
||||
|
||||
include_examples 'OAuth Authorizations require confirmed user'
|
||||
|
||||
context 'when the user is confirmed' do
|
||||
let(:confirmed_at) { 1.hour.ago }
|
||||
|
||||
context 'without valid params' do
|
||||
it 'returns 200 code and renders error view' do
|
||||
|
|
@ -34,7 +72,7 @@ RSpec.describe Oauth::AuthorizationsController do
|
|||
render_views
|
||||
|
||||
it 'returns 200 code and renders view' do
|
||||
get :new, params: params
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response).to render_template('doorkeeper/authorizations/new')
|
||||
|
|
@ -44,42 +82,28 @@ RSpec.describe Oauth::AuthorizationsController do
|
|||
application.update(trusted: true)
|
||||
request.session['user_return_to'] = 'http://example.com'
|
||||
|
||||
get :new, params: params
|
||||
subject
|
||||
|
||||
expect(request.session['user_return_to']).to be_nil
|
||||
expect(response).to have_gitlab_http_status(:found)
|
||||
end
|
||||
|
||||
context 'when there is already an access token for the application' do
|
||||
context 'when the request scope matches any of the created token scopes' do
|
||||
before do
|
||||
scopes = Doorkeeper::OAuth::Scopes.from_string('api')
|
||||
|
||||
allow(Doorkeeper.configuration).to receive(:scopes).and_return(scopes)
|
||||
|
||||
create :oauth_access_token, application: application, resource_owner_id: user.id, scopes: scopes
|
||||
end
|
||||
|
||||
it 'authorizes the request and redirects' do
|
||||
get :new, params: params
|
||||
|
||||
expect(request.session['user_return_to']).to be_nil
|
||||
expect(response).to have_gitlab_http_status(:found)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the user is unconfirmed' do
|
||||
let(:user) { create(:user, confirmed_at: nil) }
|
||||
|
||||
it 'returns 200 and renders error view' do
|
||||
get :new, params: params
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
expect(response).to render_template('doorkeeper/authorizations/error')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'POST #create' do
|
||||
subject { post :create, params: params }
|
||||
|
||||
include_examples 'OAuth Authorizations require confirmed user'
|
||||
end
|
||||
|
||||
describe 'DELETE #destroy' do
|
||||
subject { delete :destroy, params: params }
|
||||
|
||||
include_examples 'OAuth Authorizations require confirmed user'
|
||||
end
|
||||
|
||||
it 'includes Two-factor enforcement concern' do
|
||||
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -18,4 +18,24 @@ RSpec.describe Oauth::AuthorizedApplicationsController do
|
|||
expect(response).to have_gitlab_http_status(:not_found)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'DELETE #destroy' do
|
||||
let(:application) { create(:oauth_application) }
|
||||
let!(:grant) { create(:oauth_access_grant, resource_owner_id: user.id, application: application) }
|
||||
let!(:access_token) { create(:oauth_access_token, resource_owner: user, application: application) }
|
||||
|
||||
it 'revokes both access grants and tokens' do
|
||||
expect(grant).not_to be_revoked
|
||||
expect(access_token).not_to be_revoked
|
||||
|
||||
delete :destroy, params: { id: application.id }
|
||||
|
||||
expect(grant.reload).to be_revoked
|
||||
expect(access_token.reload).to be_revoked
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes Two-factor enforcement concern' do
|
||||
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -68,4 +68,8 @@ RSpec.describe Oauth::TokenInfoController do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes Two-factor enforcement concern' do
|
||||
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,9 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Oauth::TokensController do
|
||||
it 'includes Two-factor enforcement concern' do
|
||||
expect(described_class.included_modules.include?(EnforcesTwoFactorAuthentication)).to eq(true)
|
||||
end
|
||||
end
|
||||
|
|
@ -0,0 +1,160 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Repositories::LfsStorageController do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
include GitHttpHelpers
|
||||
|
||||
let_it_be(:project) { create(:project, :public) }
|
||||
let_it_be(:user) { create(:user) }
|
||||
let_it_be(:pat) { create(:personal_access_token, user: user, scopes: ['write_repository']) }
|
||||
|
||||
let(:lfs_enabled) { true }
|
||||
|
||||
before do
|
||||
stub_config(lfs: { enabled: lfs_enabled })
|
||||
end
|
||||
|
||||
describe 'PUT #upload_finalize' do
|
||||
let(:headers) { workhorse_internal_api_request_header }
|
||||
let(:extra_headers) { {} }
|
||||
let(:uploaded_file) { temp_file }
|
||||
|
||||
let(:params) do
|
||||
{
|
||||
namespace_id: project.namespace.path,
|
||||
repository_id: "#{project.path}.git",
|
||||
oid: '6b9765d3888aaec789e8c309eb05b05c3a87895d6ad70d2264bd7270fff665ac',
|
||||
size: '6725030'
|
||||
}
|
||||
end
|
||||
|
||||
before do
|
||||
request.headers.merge!(extra_headers)
|
||||
request.headers.merge!(headers)
|
||||
|
||||
if uploaded_file
|
||||
allow_next_instance_of(ActionController::Parameters) do |params|
|
||||
allow(params).to receive(:[]).and_call_original
|
||||
allow(params).to receive(:[]).with(:file).and_return(uploaded_file)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
after do
|
||||
FileUtils.rm_r(temp_file) if temp_file
|
||||
end
|
||||
|
||||
subject do
|
||||
put :upload_finalize, params: params
|
||||
end
|
||||
|
||||
context 'with lfs enabled' do
|
||||
context 'with unauthorized roles' do
|
||||
where(:user_role, :expected_status) do
|
||||
:guest | :forbidden
|
||||
:anonymous | :unauthorized
|
||||
end
|
||||
|
||||
with_them do
|
||||
let(:extra_headers) do
|
||||
if user_role == :anonymous
|
||||
{}
|
||||
else
|
||||
{ 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(user.username, pat.token) }
|
||||
end
|
||||
end
|
||||
|
||||
before do
|
||||
project.send("add_#{user_role}", user) unless user_role == :anonymous
|
||||
end
|
||||
|
||||
it_behaves_like 'returning response status', params[:expected_status]
|
||||
end
|
||||
end
|
||||
|
||||
context 'with at least developer role' do
|
||||
let(:extra_headers) { { 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(user.username, pat.token) } }
|
||||
|
||||
before do
|
||||
project.add_developer(user)
|
||||
end
|
||||
|
||||
it 'creates the objects' do
|
||||
expect { subject }
|
||||
.to change { LfsObject.count }.by(1)
|
||||
.and change { LfsObjectsProject.count }.by(1)
|
||||
|
||||
expect(response).to have_gitlab_http_status(:ok)
|
||||
end
|
||||
|
||||
context 'without the workhorse header' do
|
||||
let(:headers) { {} }
|
||||
|
||||
it { expect { subject }.to raise_error(JWT::DecodeError) }
|
||||
end
|
||||
|
||||
context 'without file' do
|
||||
let(:uploaded_file) { nil }
|
||||
|
||||
it_behaves_like 'returning response status', :unprocessable_entity
|
||||
end
|
||||
|
||||
context 'with an invalid file' do
|
||||
let(:uploaded_file) { 'test' }
|
||||
|
||||
it_behaves_like 'returning response status', :unprocessable_entity
|
||||
end
|
||||
|
||||
context 'when an expected error' do
|
||||
[
|
||||
ActiveRecord::RecordInvalid,
|
||||
UploadedFile::InvalidPathError,
|
||||
ObjectStorage::RemoteStoreError
|
||||
].each do |exception_class|
|
||||
context "#{exception_class} raised" do
|
||||
it 'renders lfs forbidden' do
|
||||
expect(LfsObjectsProject).to receive(:safe_find_or_create_by!).and_raise(exception_class)
|
||||
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:forbidden)
|
||||
expect(json_response['documentation_url']).to be_present
|
||||
expect(json_response['message']).to eq('Access forbidden. Check your access level.')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when file is not stored' do
|
||||
it 'renders unprocessable entity' do
|
||||
expect(controller).to receive(:store_file!).and_return(nil)
|
||||
|
||||
subject
|
||||
|
||||
expect(response).to have_gitlab_http_status(:unprocessable_entity)
|
||||
expect(response.body).to eq('Unprocessable entity')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with lfs disabled' do
|
||||
let(:lfs_enabled) { false }
|
||||
let(:extra_headers) { { 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(user.username, pat.token) } }
|
||||
|
||||
it_behaves_like 'returning response status', :not_implemented
|
||||
end
|
||||
|
||||
def temp_file
|
||||
upload_path = LfsObjectUploader.workhorse_local_upload_path
|
||||
file_path = "#{upload_path}/lfs"
|
||||
|
||||
FileUtils.mkdir_p(upload_path)
|
||||
File.write(file_path, 'test')
|
||||
|
||||
UploadedFile.new(file_path, filename: File.basename(file_path))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -551,7 +551,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
|
||||
it 'shows deployment message' do
|
||||
expect(page).to have_content 'This job is deployed to production'
|
||||
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
|
||||
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
|
||||
end
|
||||
|
||||
context 'when there is a cluster used for the deployment' do
|
||||
|
|
@ -583,7 +583,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
|
||||
it 'shows a link for the job' do
|
||||
expect(page).to have_link environment.name
|
||||
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
|
||||
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -593,7 +593,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
it 'shows a link to latest deployment' do
|
||||
expect(page).to have_link environment.name
|
||||
expect(page).to have_content 'This job is creating a deployment'
|
||||
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
|
||||
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -645,15 +645,15 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
end
|
||||
|
||||
it 'renders a link to the most recent deployment' do
|
||||
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
|
||||
expect(find('.js-job-deployment-link')['href']).to include(second_deployment.deployable.project.path, second_deployment.deployable_id.to_s)
|
||||
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
|
||||
expect(find('[data-testid="job-deployment-link"]')['href']).to include(second_deployment.deployable.project.path, second_deployment.deployable_id.to_s)
|
||||
end
|
||||
|
||||
context 'when deployment does not have a deployable' do
|
||||
let!(:second_deployment) { create(:deployment, :success, environment: environment, deployable: nil) }
|
||||
|
||||
it 'has an empty href' do
|
||||
expect(find('.js-job-deployment-link')['href']).to be_empty
|
||||
expect(find('[data-testid="job-deployment-link"]')['href']).to be_empty
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -679,7 +679,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
expected_text = 'This job is creating a deployment to staging'
|
||||
|
||||
expect(page).to have_css('.environment-information', text: expected_text)
|
||||
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
|
||||
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
|
||||
end
|
||||
|
||||
context 'when it has deployment' do
|
||||
|
|
@ -690,7 +690,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
|
||||
expect(page).to have_css('.environment-information', text: expected_text)
|
||||
expect(page).to have_css('.environment-information', text: 'latest deployment')
|
||||
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
|
||||
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -705,7 +705,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
|
|||
'.environment-information', text: expected_text)
|
||||
expect(page).not_to have_css(
|
||||
'.environment-information', text: 'latest deployment')
|
||||
expect(find('.js-environment-link')['href']).to match("environments/#{environment.id}")
|
||||
expect(find('[data-testid="job-environment-link"]')['href']).to match("environments/#{environment.id}")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -16,13 +16,15 @@ exports[`grafana integration component default state to match the default snapsh
|
|||
|
||||
</h3>
|
||||
|
||||
<gl-deprecated-button-stub
|
||||
<gl-button-stub
|
||||
category="tertiary"
|
||||
class="js-settings-toggle"
|
||||
size="md"
|
||||
variant="secondary"
|
||||
icon=""
|
||||
size="medium"
|
||||
variant="default"
|
||||
>
|
||||
Expand
|
||||
</gl-deprecated-button-stub>
|
||||
</gl-button-stub>
|
||||
|
||||
<p
|
||||
class="js-section-sub-header"
|
||||
|
|
@ -90,14 +92,16 @@ exports[`grafana integration component default state to match the default snapsh
|
|||
</p>
|
||||
</gl-form-group-stub>
|
||||
|
||||
<gl-deprecated-button-stub
|
||||
size="md"
|
||||
<gl-button-stub
|
||||
category="primary"
|
||||
icon=""
|
||||
size="medium"
|
||||
variant="success"
|
||||
>
|
||||
|
||||
Save Changes
|
||||
|
||||
</gl-deprecated-button-stub>
|
||||
</gl-button-stub>
|
||||
</form>
|
||||
</div>
|
||||
</section>
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { mount, shallowMount } from '@vue/test-utils';
|
||||
import { GlDeprecatedButton } from '@gitlab/ui';
|
||||
import { GlButton } from '@gitlab/ui';
|
||||
import { TEST_HOST } from 'helpers/test_constants';
|
||||
import GrafanaIntegration from '~/grafana_integration/components/grafana_integration.vue';
|
||||
import { createStore } from '~/grafana_integration/store';
|
||||
|
|
@ -51,7 +51,7 @@ describe('grafana integration component', () => {
|
|||
it('renders as an expand button by default', () => {
|
||||
wrapper = shallowMount(GrafanaIntegration, { store });
|
||||
|
||||
const button = wrapper.find(GlDeprecatedButton);
|
||||
const button = wrapper.find(GlButton);
|
||||
|
||||
expect(button.text()).toBe('Expand');
|
||||
});
|
||||
|
|
@ -77,8 +77,7 @@ describe('grafana integration component', () => {
|
|||
});
|
||||
|
||||
describe('submit button', () => {
|
||||
const findSubmitButton = () =>
|
||||
wrapper.find('.settings-content form').find(GlDeprecatedButton);
|
||||
const findSubmitButton = () => wrapper.find('.settings-content form').find(GlButton);
|
||||
|
||||
const endpointRequest = [
|
||||
operationsSettingsEndpoint,
|
||||
|
|
|
|||
|
|
@ -1,14 +1,13 @@
|
|||
import Vue from 'vue';
|
||||
import component from '~/jobs/components/environments_block.vue';
|
||||
import mountComponent from '../../helpers/vue_mount_component_helper';
|
||||
import { mount } from '@vue/test-utils';
|
||||
import EnvironmentsBlock from '~/jobs/components/environments_block.vue';
|
||||
|
||||
const TEST_CLUSTER_NAME = 'test_cluster';
|
||||
const TEST_CLUSTER_PATH = 'path/to/test_cluster';
|
||||
const TEST_KUBERNETES_NAMESPACE = 'this-is-a-kubernetes-namespace';
|
||||
|
||||
describe('Environments block', () => {
|
||||
const Component = Vue.extend(component);
|
||||
let vm;
|
||||
let wrapper;
|
||||
|
||||
const status = {
|
||||
group: 'success',
|
||||
icon: 'status_success',
|
||||
|
|
@ -38,20 +37,23 @@ describe('Environments block', () => {
|
|||
});
|
||||
|
||||
const createComponent = (deploymentStatus = {}, deploymentCluster = {}) => {
|
||||
vm = mountComponent(Component, {
|
||||
deploymentStatus,
|
||||
deploymentCluster,
|
||||
iconStatus: status,
|
||||
wrapper = mount(EnvironmentsBlock, {
|
||||
propsData: {
|
||||
deploymentStatus,
|
||||
deploymentCluster,
|
||||
iconStatus: status,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const findText = () => vm.$el.textContent.trim();
|
||||
const findJobDeploymentLink = () => vm.$el.querySelector('.js-job-deployment-link');
|
||||
const findEnvironmentLink = () => vm.$el.querySelector('.js-environment-link');
|
||||
const findClusterLink = () => vm.$el.querySelector('.js-job-cluster-link');
|
||||
const findText = () => wrapper.find(EnvironmentsBlock).text();
|
||||
const findJobDeploymentLink = () => wrapper.find('[data-testid="job-deployment-link"]');
|
||||
const findEnvironmentLink = () => wrapper.find('[data-testid="job-environment-link"]');
|
||||
const findClusterLink = () => wrapper.find('[data-testid="job-cluster-link"]');
|
||||
|
||||
afterEach(() => {
|
||||
vm.$destroy();
|
||||
wrapper.destroy();
|
||||
wrapper = null;
|
||||
});
|
||||
|
||||
describe('with last deployment', () => {
|
||||
|
|
@ -61,7 +63,7 @@ describe('Environments block', () => {
|
|||
environment,
|
||||
});
|
||||
|
||||
expect(findText()).toEqual('This job is deployed to environment.');
|
||||
expect(findText()).toBe('This job is deployed to environment.');
|
||||
});
|
||||
|
||||
describe('when there is a cluster', () => {
|
||||
|
|
@ -74,7 +76,7 @@ describe('Environments block', () => {
|
|||
createDeploymentWithCluster(),
|
||||
);
|
||||
|
||||
expect(findText()).toEqual(
|
||||
expect(findText()).toBe(
|
||||
`This job is deployed to environment using cluster ${TEST_CLUSTER_NAME}.`,
|
||||
);
|
||||
});
|
||||
|
|
@ -89,7 +91,7 @@ describe('Environments block', () => {
|
|||
createDeploymentWithClusterAndKubernetesNamespace(),
|
||||
);
|
||||
|
||||
expect(findText()).toEqual(
|
||||
expect(findText()).toBe(
|
||||
`This job is deployed to environment using cluster ${TEST_CLUSTER_NAME} and namespace ${TEST_KUBERNETES_NAMESPACE}.`,
|
||||
);
|
||||
});
|
||||
|
|
@ -105,11 +107,11 @@ describe('Environments block', () => {
|
|||
environment: createEnvironmentWithLastDeployment(),
|
||||
});
|
||||
|
||||
expect(findText()).toEqual(
|
||||
expect(findText()).toBe(
|
||||
'This job is an out-of-date deployment to environment. View the most recent deployment.',
|
||||
);
|
||||
|
||||
expect(findJobDeploymentLink().getAttribute('href')).toEqual('bar');
|
||||
expect(findJobDeploymentLink().attributes('href')).toBe('bar');
|
||||
});
|
||||
|
||||
describe('when there is a cluster', () => {
|
||||
|
|
@ -122,7 +124,7 @@ describe('Environments block', () => {
|
|||
createDeploymentWithCluster(),
|
||||
);
|
||||
|
||||
expect(findText()).toEqual(
|
||||
expect(findText()).toBe(
|
||||
`This job is an out-of-date deployment to environment using cluster ${TEST_CLUSTER_NAME}. View the most recent deployment.`,
|
||||
);
|
||||
});
|
||||
|
|
@ -137,7 +139,7 @@ describe('Environments block', () => {
|
|||
createDeploymentWithClusterAndKubernetesNamespace(),
|
||||
);
|
||||
|
||||
expect(findText()).toEqual(
|
||||
expect(findText()).toBe(
|
||||
`This job is an out-of-date deployment to environment using cluster ${TEST_CLUSTER_NAME} and namespace ${TEST_KUBERNETES_NAMESPACE}. View the most recent deployment.`,
|
||||
);
|
||||
});
|
||||
|
|
@ -152,7 +154,7 @@ describe('Environments block', () => {
|
|||
environment,
|
||||
});
|
||||
|
||||
expect(findText()).toEqual('This job is an out-of-date deployment to environment.');
|
||||
expect(findText()).toBe('This job is an out-of-date deployment to environment.');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -164,7 +166,7 @@ describe('Environments block', () => {
|
|||
environment,
|
||||
});
|
||||
|
||||
expect(findText()).toEqual('The deployment of this job to environment did not succeed.');
|
||||
expect(findText()).toBe('The deployment of this job to environment did not succeed.');
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -176,13 +178,15 @@ describe('Environments block', () => {
|
|||
environment: createEnvironmentWithLastDeployment(),
|
||||
});
|
||||
|
||||
expect(findText()).toEqual(
|
||||
expect(findText()).toBe(
|
||||
'This job is creating a deployment to environment. This will overwrite the latest deployment.',
|
||||
);
|
||||
|
||||
expect(findJobDeploymentLink().getAttribute('href')).toEqual('bar');
|
||||
expect(findEnvironmentLink().getAttribute('href')).toEqual(environment.environment_path);
|
||||
expect(findClusterLink()).toBeNull();
|
||||
expect(findEnvironmentLink().attributes('href')).toBe(environment.environment_path);
|
||||
|
||||
expect(findJobDeploymentLink().attributes('href')).toBe('bar');
|
||||
|
||||
expect(findClusterLink().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -193,7 +197,7 @@ describe('Environments block', () => {
|
|||
environment,
|
||||
});
|
||||
|
||||
expect(findText()).toEqual('This job is creating a deployment to environment.');
|
||||
expect(findText()).toBe('This job is creating a deployment to environment.');
|
||||
});
|
||||
|
||||
describe('when there is a cluster', () => {
|
||||
|
|
@ -206,7 +210,7 @@ describe('Environments block', () => {
|
|||
createDeploymentWithCluster(),
|
||||
);
|
||||
|
||||
expect(findText()).toEqual(
|
||||
expect(findText()).toBe(
|
||||
`This job is creating a deployment to environment using cluster ${TEST_CLUSTER_NAME}.`,
|
||||
);
|
||||
});
|
||||
|
|
@ -220,7 +224,7 @@ describe('Environments block', () => {
|
|||
environment: null,
|
||||
});
|
||||
|
||||
expect(findEnvironmentLink()).toBeNull();
|
||||
expect(findEnvironmentLink().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -235,11 +239,11 @@ describe('Environments block', () => {
|
|||
createDeploymentWithCluster(),
|
||||
);
|
||||
|
||||
expect(findText()).toEqual(
|
||||
expect(findText()).toBe(
|
||||
`This job is deployed to environment using cluster ${TEST_CLUSTER_NAME}.`,
|
||||
);
|
||||
|
||||
expect(findClusterLink().getAttribute('href')).toEqual(TEST_CLUSTER_PATH);
|
||||
expect(findClusterLink().attributes('href')).toBe(TEST_CLUSTER_PATH);
|
||||
});
|
||||
|
||||
describe('when the cluster is missing the path', () => {
|
||||
|
|
@ -254,7 +258,7 @@ describe('Environments block', () => {
|
|||
|
||||
expect(findText()).toContain('using cluster the-cluster.');
|
||||
|
||||
expect(findClusterLink()).toBeNull();
|
||||
expect(findClusterLink().exists()).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { GlLink } from '@gitlab/ui';
|
||||
import { GlBadge, GlLink } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import StuckBlock from '~/jobs/components/stuck_block.vue';
|
||||
|
||||
|
|
@ -27,7 +27,7 @@ describe('Stuck Block Job component', () => {
|
|||
const findStuckNoRunners = () => wrapper.find('[data-testid="job-stuck-no-runners"]');
|
||||
const findStuckWithTags = () => wrapper.find('[data-testid="job-stuck-with-tags"]');
|
||||
const findRunnerPathLink = () => wrapper.find(GlLink);
|
||||
const findAllBadges = () => wrapper.findAll('[data-testid="badge"]');
|
||||
const findAllBadges = () => wrapper.findAll(GlBadge);
|
||||
|
||||
describe('with no runners for project', () => {
|
||||
beforeEach(() => {
|
||||
|
|
|
|||
|
|
@ -2,45 +2,18 @@ import SnippetHeader from '~/snippets/components/snippet_header.vue';
|
|||
import DeleteSnippetMutation from '~/snippets/mutations/deleteSnippet.mutation.graphql';
|
||||
import { ApolloMutation } from 'vue-apollo';
|
||||
import { GlButton, GlModal } from '@gitlab/ui';
|
||||
import { shallowMount } from '@vue/test-utils';
|
||||
import { mount } from '@vue/test-utils';
|
||||
import { Blob, BinaryBlob } from 'jest/blob/components/mock_data';
|
||||
import waitForPromises from 'helpers/wait_for_promises';
|
||||
|
||||
describe('Snippet header component', () => {
|
||||
let wrapper;
|
||||
const snippet = {
|
||||
id: 'gid://gitlab/PersonalSnippet/50',
|
||||
title: 'The property of Thor',
|
||||
visibilityLevel: 'private',
|
||||
webUrl: 'http://personal.dev.null/42',
|
||||
userPermissions: {
|
||||
adminSnippet: true,
|
||||
updateSnippet: true,
|
||||
reportSnippet: false,
|
||||
},
|
||||
project: null,
|
||||
author: {
|
||||
name: 'Thor Odinson',
|
||||
},
|
||||
blobs: [Blob],
|
||||
};
|
||||
const mutationVariables = {
|
||||
mutation: DeleteSnippetMutation,
|
||||
variables: {
|
||||
id: snippet.id,
|
||||
},
|
||||
};
|
||||
const errorMsg = 'Foo bar';
|
||||
const err = { message: errorMsg };
|
||||
let snippet;
|
||||
let mutationTypes;
|
||||
let mutationVariables;
|
||||
|
||||
const resolveMutate = jest.fn(() =>
|
||||
Promise.resolve({ data: { destroySnippet: { errors: [] } } }),
|
||||
);
|
||||
const rejectMutation = jest.fn(() => Promise.reject(err));
|
||||
|
||||
const mutationTypes = {
|
||||
RESOLVE: resolveMutate,
|
||||
REJECT: rejectMutation,
|
||||
};
|
||||
let errorMsg;
|
||||
let err;
|
||||
|
||||
function createComponent({
|
||||
loading = false,
|
||||
|
|
@ -63,7 +36,7 @@ describe('Snippet header component', () => {
|
|||
mutate: mutationRes,
|
||||
};
|
||||
|
||||
wrapper = shallowMount(SnippetHeader, {
|
||||
wrapper = mount(SnippetHeader, {
|
||||
mocks: { $apollo },
|
||||
propsData: {
|
||||
snippet: {
|
||||
|
|
@ -76,6 +49,41 @@ describe('Snippet header component', () => {
|
|||
});
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
snippet = {
|
||||
id: 'gid://gitlab/PersonalSnippet/50',
|
||||
title: 'The property of Thor',
|
||||
visibilityLevel: 'private',
|
||||
webUrl: 'http://personal.dev.null/42',
|
||||
userPermissions: {
|
||||
adminSnippet: true,
|
||||
updateSnippet: true,
|
||||
reportSnippet: false,
|
||||
},
|
||||
project: null,
|
||||
author: {
|
||||
name: 'Thor Odinson',
|
||||
},
|
||||
blobs: [Blob],
|
||||
createdAt: new Date(Date.now() - 32 * 24 * 3600 * 1000).toISOString(),
|
||||
};
|
||||
|
||||
mutationVariables = {
|
||||
mutation: DeleteSnippetMutation,
|
||||
variables: {
|
||||
id: snippet.id,
|
||||
},
|
||||
};
|
||||
|
||||
errorMsg = 'Foo bar';
|
||||
err = { message: errorMsg };
|
||||
|
||||
mutationTypes = {
|
||||
RESOLVE: jest.fn(() => Promise.resolve({ data: { destroySnippet: { errors: [] } } })),
|
||||
REJECT: jest.fn(() => Promise.reject(err)),
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
wrapper.destroy();
|
||||
});
|
||||
|
|
@ -85,6 +93,23 @@ describe('Snippet header component', () => {
|
|||
expect(wrapper.find('.detail-page-header').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('renders a message showing snippet creation date and author', () => {
|
||||
createComponent();
|
||||
|
||||
const text = wrapper.find('[data-testid="authored-message"]').text();
|
||||
expect(text).toContain('Authored 1 month ago by');
|
||||
expect(text).toContain('Thor Odinson');
|
||||
});
|
||||
|
||||
it('renders a message showing only snippet creation date if author is null', () => {
|
||||
snippet.author = null;
|
||||
|
||||
createComponent();
|
||||
|
||||
const text = wrapper.find('[data-testid="authored-message"]').text();
|
||||
expect(text).toBe('Authored 1 month ago');
|
||||
});
|
||||
|
||||
it('renders action buttons based on permissions', () => {
|
||||
createComponent({
|
||||
permissions: {
|
||||
|
|
@ -163,14 +188,15 @@ describe('Snippet header component', () => {
|
|||
expect(mutationTypes.RESOLVE).toHaveBeenCalledWith(mutationVariables);
|
||||
});
|
||||
|
||||
it('sets error message if mutation fails', () => {
|
||||
it('sets error message if mutation fails', async () => {
|
||||
createComponent({ mutationRes: mutationTypes.REJECT });
|
||||
expect(Boolean(wrapper.vm.errorMessage)).toBe(false);
|
||||
|
||||
wrapper.vm.deleteSnippet();
|
||||
return wrapper.vm.$nextTick().then(() => {
|
||||
expect(wrapper.vm.errorMessage).toEqual(errorMsg);
|
||||
});
|
||||
|
||||
await waitForPromises();
|
||||
|
||||
expect(wrapper.vm.errorMessage).toEqual(errorMsg);
|
||||
});
|
||||
|
||||
describe('in case of successful mutation, closes modal and redirects to correct listing', () => {
|
||||
|
|
|
|||
|
|
@ -327,4 +327,12 @@ RSpec.describe IssuablesHelper do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#sidebar_milestone_tooltip_label' do
|
||||
it 'escapes HTML in the milestone title' do
|
||||
milestone = build(:milestone, title: '<img onerror=alert(1)>')
|
||||
|
||||
expect(helper.sidebar_milestone_tooltip_label(milestone)).to eq('<img onerror=alert(1)><br/>Milestone')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -75,6 +75,12 @@ RSpec.describe Banzai::Filter::IssueReferenceFilter do
|
|||
expect(doc.text).to eq "Issue #{reference}"
|
||||
end
|
||||
|
||||
it 'renders non-HTML tooltips' do
|
||||
doc = reference_filter("Issue #{reference}")
|
||||
|
||||
expect(doc.at_css('a')).not_to have_attribute('data-html')
|
||||
end
|
||||
|
||||
it 'includes default classes' do
|
||||
doc = reference_filter("Issue #{reference}")
|
||||
expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-issue has-tooltip'
|
||||
|
|
|
|||
|
|
@ -19,6 +19,29 @@ RSpec.describe Gitlab::Checks::BranchCheck do
|
|||
end
|
||||
end
|
||||
|
||||
context "prohibited branches check" do
|
||||
it "prohibits 40-character hexadecimal branch names" do
|
||||
allow(subject).to receive(:branch_name).and_return("267208abfe40e546f5e847444276f7d43a39503e")
|
||||
|
||||
expect { subject.validate! }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You cannot create a branch with a 40-character hexadecimal branch name.")
|
||||
end
|
||||
|
||||
it "doesn't prohibit a nested hexadecimal in a branch name" do
|
||||
allow(subject).to receive(:branch_name).and_return("fix-267208abfe40e546f5e847444276f7d43a39503e")
|
||||
|
||||
expect { subject.validate! }.not_to raise_error
|
||||
end
|
||||
|
||||
context "the feature flag is disabled" do
|
||||
it "doesn't prohibit a 40-character hexadecimal branch name" do
|
||||
stub_feature_flags(prohibit_hexadecimal_branch_names: false)
|
||||
allow(subject).to receive(:branch_name).and_return("267208abfe40e546f5e847444276f7d43a39503e")
|
||||
|
||||
expect { subject.validate! }.not_to raise_error
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'protected branches check' do
|
||||
before do
|
||||
allow(ProtectedBranch).to receive(:protected?).with(project, 'master').and_return(true)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,58 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
|
||||
let_it_be(:filepath) { File.join(Dir.tmpdir, 'decompressed_archive_size_validator_spec.gz') }
|
||||
|
||||
before(:all) do
|
||||
create_compressed_file
|
||||
end
|
||||
|
||||
after(:all) do
|
||||
FileUtils.rm(filepath)
|
||||
end
|
||||
|
||||
subject { described_class.new(archive_path: filepath, max_bytes: max_bytes) }
|
||||
|
||||
describe '#valid?' do
|
||||
let(:max_bytes) { 1 }
|
||||
|
||||
context 'when file does not exceed allowed decompressed size' do
|
||||
let(:max_bytes) { 20 }
|
||||
|
||||
it 'returns true' do
|
||||
expect(subject.valid?).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when file exceeds allowed decompressed size' do
|
||||
it 'returns false' do
|
||||
expect(subject.valid?).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when something goes wrong during decompression' do
|
||||
before do
|
||||
allow(subject.archive_file).to receive(:eof?).and_raise(StandardError)
|
||||
end
|
||||
|
||||
it 'logs and tracks raised exception' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(instance_of(StandardError))
|
||||
expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(message: 'Decompressed archive size validation failed.'))
|
||||
|
||||
subject.valid?
|
||||
end
|
||||
|
||||
it 'returns false' do
|
||||
expect(subject.valid?).to eq(false)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def create_compressed_file
|
||||
Zlib::GzipWriter.open(filepath) do |gz|
|
||||
gz.write('Hello World!')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -98,6 +98,45 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when file exceeds acceptable decompressed size' do
|
||||
let(:project) { create(:project) }
|
||||
let(:shared) { Gitlab::ImportExport::Shared.new(project) }
|
||||
let(:filepath) { File.join(Dir.tmpdir, 'file_importer_spec.gz') }
|
||||
|
||||
subject { described_class.new(importable: project, archive_file: filepath, shared: shared) }
|
||||
|
||||
before do
|
||||
Zlib::GzipWriter.open(filepath) do |gz|
|
||||
gz.write('Hello World!')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when validate_import_decompressed_archive_size feature flag is enabled' do
|
||||
before do
|
||||
stub_feature_flags(validate_import_decompressed_archive_size: true)
|
||||
|
||||
allow(Gitlab::ImportExport::DecompressedArchiveSizeValidator).to receive(:max_bytes).and_return(1)
|
||||
end
|
||||
|
||||
it 'returns false' do
|
||||
expect(subject.import).to eq(false)
|
||||
expect(shared.errors.join).to eq('Decompressed archive size validation failed.')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when validate_import_decompressed_archive_size feature flag is disabled' do
|
||||
before do
|
||||
stub_feature_flags(validate_import_decompressed_archive_size: false)
|
||||
end
|
||||
|
||||
it 'skips validation' do
|
||||
expect(subject).to receive(:validate_decompressed_archive_size).never
|
||||
|
||||
subject.import
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def setup_files
|
||||
FileUtils.mkdir_p("#{shared.export_path}/subfolder/")
|
||||
FileUtils.touch(valid_file)
|
||||
|
|
|
|||
|
|
@ -659,6 +659,7 @@ PrometheusMetric:
|
|||
- group
|
||||
- common
|
||||
- identifier
|
||||
- dashboard_path
|
||||
PrometheusAlert:
|
||||
- threshold
|
||||
- operator
|
||||
|
|
|
|||
|
|
@ -80,13 +80,13 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
|
|||
context 'errored' do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
where(:error, :error_status) do
|
||||
SocketError | :unreachable
|
||||
OpenSSL::X509::CertificateError | :authentication_failure
|
||||
StandardError | :unknown_failure
|
||||
Kubeclient::HttpError.new(408, "timed out", nil) | :unreachable
|
||||
Kubeclient::HttpError.new(408, "timeout", nil) | :unreachable
|
||||
Kubeclient::HttpError.new(408, "", nil) | :authentication_failure
|
||||
where(:error, :connection_status, :error_status) do
|
||||
SocketError | :unreachable | :connection_error
|
||||
OpenSSL::X509::CertificateError | :authentication_failure | :authentication_error
|
||||
StandardError | :unknown_failure | :unknown_error
|
||||
Kubeclient::HttpError.new(408, "timed out", nil) | :unreachable | :http_error
|
||||
Kubeclient::HttpError.new(408, "timeout", nil) | :unreachable | :http_error
|
||||
Kubeclient::HttpError.new(408, "", nil) | :authentication_failure | :http_error
|
||||
end
|
||||
|
||||
with_them do
|
||||
|
|
@ -97,7 +97,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
|
|||
it 'returns error status' do
|
||||
result = described_class.graceful_request(1) { client.foo }
|
||||
|
||||
expect(result).to eq({ status: error_status })
|
||||
expect(result).to eq({ status: connection_status, connection_error: error_status })
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -7,45 +7,51 @@ RSpec.describe Gitlab::Kubernetes::Node do
|
|||
|
||||
describe '#all' do
|
||||
let(:cluster) { create(:cluster, :provided_by_user, :group) }
|
||||
let(:expected_nodes) { [] }
|
||||
let(:expected_nodes) { nil }
|
||||
let(:nodes) { [kube_node.merge(kube_node_metrics)] }
|
||||
|
||||
subject { described_class.new(cluster).all }
|
||||
|
||||
before do
|
||||
stub_kubeclient_nodes_and_nodes_metrics(cluster.platform.api_url)
|
||||
end
|
||||
|
||||
subject { described_class.new(cluster).all }
|
||||
|
||||
context 'when connection to the cluster is successful' do
|
||||
let(:expected_nodes) { [kube_node.merge(kube_node_metrics)] }
|
||||
let(:expected_nodes) { { nodes: nodes } }
|
||||
|
||||
it { is_expected.to eq(expected_nodes) }
|
||||
end
|
||||
|
||||
context 'when cluster cannot be reached' do
|
||||
before do
|
||||
allow(cluster.kubeclient.core_client).to receive(:discover)
|
||||
.and_raise(SocketError)
|
||||
context 'when there is a connection error' do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
where(:error, :error_status) do
|
||||
SocketError | :kubernetes_connection_error
|
||||
OpenSSL::X509::CertificateError | :kubernetes_authentication_error
|
||||
StandardError | :unknown_error
|
||||
Kubeclient::HttpError.new(408, "", nil) | :kubeclient_http_error
|
||||
end
|
||||
|
||||
it { is_expected.to eq(expected_nodes) }
|
||||
end
|
||||
context 'when there is an error while querying nodes' do
|
||||
with_them do
|
||||
before do
|
||||
allow(cluster.kubeclient).to receive(:get_nodes).and_raise(error)
|
||||
end
|
||||
|
||||
context 'when cluster cannot be authenticated to' do
|
||||
before do
|
||||
allow(cluster.kubeclient.core_client).to receive(:discover)
|
||||
.and_raise(OpenSSL::X509::CertificateError.new('Certificate error'))
|
||||
it { is_expected.to eq({ node_connection_error: error_status }) }
|
||||
end
|
||||
end
|
||||
|
||||
it { is_expected.to eq(expected_nodes) }
|
||||
end
|
||||
context 'when there is an error while querying metrics' do
|
||||
with_them do
|
||||
before do
|
||||
allow(cluster.kubeclient).to receive(:get_nodes).and_return({ response: nodes })
|
||||
allow(cluster.kubeclient).to receive(:metrics_client).and_raise(error)
|
||||
end
|
||||
|
||||
context 'when Kubeclient::HttpError is raised' do
|
||||
before do
|
||||
allow(cluster.kubeclient.core_client).to receive(:discover)
|
||||
.and_raise(Kubeclient::HttpError.new(403, 'Forbidden', nil))
|
||||
it { is_expected.to eq({ nodes: nodes, metrics_connection_error: error_status }) }
|
||||
end
|
||||
end
|
||||
|
||||
it { is_expected.to eq(expected_nodes) }
|
||||
end
|
||||
|
||||
context 'when an uncategorised error is raised' do
|
||||
|
|
@ -54,7 +60,7 @@ RSpec.describe Gitlab::Kubernetes::Node do
|
|||
.and_raise(StandardError)
|
||||
end
|
||||
|
||||
it { is_expected.to eq(expected_nodes) }
|
||||
it { is_expected.to eq({ node_connection_error: :unknown_error }) }
|
||||
|
||||
it 'notifies Sentry' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:track_exception)
|
||||
|
|
|
|||
|
|
@ -45,6 +45,21 @@ RSpec.describe Notify do
|
|||
end
|
||||
end
|
||||
|
||||
shared_examples 'it requires a group' do
|
||||
context 'when given an deleted group' do
|
||||
before do
|
||||
# destroy group and group member
|
||||
group_member.destroy!
|
||||
group.destroy!
|
||||
end
|
||||
|
||||
it 'returns NullMail type message' do
|
||||
expect(Gitlab::AppLogger).to receive(:info)
|
||||
expect(subject.message).to be_a(ActionMailer::Base::NullMail)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'for a project' do
|
||||
shared_examples 'an assignee email' do
|
||||
let(:recipient) { assignee }
|
||||
|
|
@ -1388,6 +1403,7 @@ RSpec.describe Notify do
|
|||
it_behaves_like "a user cannot unsubscribe through footer link"
|
||||
it_behaves_like 'appearance header and footer enabled'
|
||||
it_behaves_like 'appearance header and footer not enabled'
|
||||
it_behaves_like 'it requires a group'
|
||||
|
||||
it 'contains all the useful information' do
|
||||
is_expected.to have_subject "Access to the #{group.name} group was granted"
|
||||
|
|
@ -1422,6 +1438,7 @@ RSpec.describe Notify do
|
|||
it_behaves_like "a user cannot unsubscribe through footer link"
|
||||
it_behaves_like 'appearance header and footer enabled'
|
||||
it_behaves_like 'appearance header and footer not enabled'
|
||||
it_behaves_like 'it requires a group'
|
||||
|
||||
it 'contains all the useful information' do
|
||||
is_expected.to have_subject "Invitation to join the #{group.name} group"
|
||||
|
|
@ -1448,6 +1465,7 @@ RSpec.describe Notify do
|
|||
it_behaves_like "a user cannot unsubscribe through footer link"
|
||||
it_behaves_like 'appearance header and footer enabled'
|
||||
it_behaves_like 'appearance header and footer not enabled'
|
||||
it_behaves_like 'it requires a group'
|
||||
|
||||
it 'contains all the useful information' do
|
||||
is_expected.to have_subject 'Invitation accepted'
|
||||
|
|
|
|||
|
|
@ -1153,6 +1153,57 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#connection_error' do
|
||||
let(:cluster) { create(:cluster) }
|
||||
let(:error) { :unknown_error }
|
||||
|
||||
subject { cluster.connection_error }
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
|
||||
context 'with a cached status' do
|
||||
before do
|
||||
stub_reactive_cache(cluster, connection_error: error)
|
||||
end
|
||||
|
||||
it { is_expected.to eq(error) }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#node_connection_error' do
|
||||
let(:cluster) { create(:cluster) }
|
||||
let(:error) { :unknown_error }
|
||||
|
||||
subject { cluster.node_connection_error }
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
|
||||
context 'with a cached status' do
|
||||
before do
|
||||
stub_reactive_cache(cluster, node_connection_error: error)
|
||||
end
|
||||
|
||||
it { is_expected.to eq(error) }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#metrics_connection_error' do
|
||||
let(:cluster) { create(:cluster) }
|
||||
let(:error) { :unknown_error }
|
||||
|
||||
subject { cluster.metrics_connection_error }
|
||||
|
||||
it { is_expected.to be_nil }
|
||||
|
||||
context 'with a cached status' do
|
||||
before do
|
||||
stub_reactive_cache(cluster, metrics_connection_error: error)
|
||||
end
|
||||
|
||||
it { is_expected.to eq(error) }
|
||||
end
|
||||
end
|
||||
|
||||
describe '#nodes' do
|
||||
let(:cluster) { create(:cluster) }
|
||||
|
||||
|
|
@ -1186,43 +1237,49 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
|
|||
context 'cluster is enabled' do
|
||||
let(:cluster) { create(:cluster, :provided_by_user, :group) }
|
||||
let(:gl_k8s_node_double) { double(Gitlab::Kubernetes::Node) }
|
||||
let(:expected_nodes) { nil }
|
||||
let(:expected_nodes) { {} }
|
||||
|
||||
before do
|
||||
stub_kubeclient_discover(cluster.platform.api_url)
|
||||
allow(Gitlab::Kubernetes::Node).to receive(:new).with(cluster).and_return(gl_k8s_node_double)
|
||||
allow(gl_k8s_node_double).to receive(:all).and_return([])
|
||||
allow(gl_k8s_node_double).to receive(:all).and_return(expected_nodes)
|
||||
end
|
||||
|
||||
context 'connection to the cluster is successful' do
|
||||
let(:expected_nodes) { { nodes: [kube_node.merge(kube_node_metrics)] } }
|
||||
let(:connection_status) { { connection_status: :connected } }
|
||||
|
||||
before do
|
||||
allow(gl_k8s_node_double).to receive(:all).and_return(expected_nodes)
|
||||
end
|
||||
|
||||
let(:expected_nodes) { [kube_node.merge(kube_node_metrics)] }
|
||||
|
||||
it { is_expected.to eq(connection_status: :connected, nodes: expected_nodes) }
|
||||
it { is_expected.to eq(**connection_status, **expected_nodes) }
|
||||
end
|
||||
|
||||
context 'cluster cannot be reached' do
|
||||
let(:connection_status) { { connection_status: :unreachable, connection_error: :connection_error } }
|
||||
|
||||
before do
|
||||
allow(cluster.kubeclient.core_client).to receive(:discover)
|
||||
.and_raise(SocketError)
|
||||
end
|
||||
|
||||
it { is_expected.to eq(connection_status: :unreachable, nodes: expected_nodes) }
|
||||
it { is_expected.to eq(**connection_status, **expected_nodes) }
|
||||
end
|
||||
|
||||
context 'cluster cannot be authenticated to' do
|
||||
let(:connection_status) { { connection_status: :authentication_failure, connection_error: :authentication_error } }
|
||||
|
||||
before do
|
||||
allow(cluster.kubeclient.core_client).to receive(:discover)
|
||||
.and_raise(OpenSSL::X509::CertificateError.new("Certificate error"))
|
||||
end
|
||||
|
||||
it { is_expected.to eq(connection_status: :authentication_failure, nodes: expected_nodes) }
|
||||
it { is_expected.to eq(**connection_status, **expected_nodes) }
|
||||
end
|
||||
|
||||
describe 'Kubeclient::HttpError' do
|
||||
let(:connection_status) { { connection_status: :authentication_failure, connection_error: :http_error } }
|
||||
let(:error_code) { 403 }
|
||||
let(:error_message) { "Forbidden" }
|
||||
|
||||
|
|
@ -1231,28 +1288,32 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
|
|||
.and_raise(Kubeclient::HttpError.new(error_code, error_message, nil))
|
||||
end
|
||||
|
||||
it { is_expected.to eq(connection_status: :authentication_failure, nodes: expected_nodes) }
|
||||
it { is_expected.to eq(**connection_status, **expected_nodes) }
|
||||
|
||||
context 'generic timeout' do
|
||||
let(:connection_status) { { connection_status: :unreachable, connection_error: :http_error } }
|
||||
let(:error_message) { 'Timed out connecting to server'}
|
||||
|
||||
it { is_expected.to eq(connection_status: :unreachable, nodes: expected_nodes) }
|
||||
it { is_expected.to eq(**connection_status, **expected_nodes) }
|
||||
end
|
||||
|
||||
context 'gateway timeout' do
|
||||
let(:connection_status) { { connection_status: :unreachable, connection_error: :http_error } }
|
||||
let(:error_message) { '504 Gateway Timeout for GET https://kubernetes.example.com/api/v1'}
|
||||
|
||||
it { is_expected.to eq(connection_status: :unreachable, nodes: expected_nodes) }
|
||||
it { is_expected.to eq(**connection_status, **expected_nodes) }
|
||||
end
|
||||
end
|
||||
|
||||
context 'an uncategorised error is raised' do
|
||||
let(:connection_status) { { connection_status: :unknown_failure, connection_error: :unknown_error } }
|
||||
|
||||
before do
|
||||
allow(cluster.kubeclient.core_client).to receive(:discover)
|
||||
.and_raise(StandardError)
|
||||
end
|
||||
|
||||
it { is_expected.to eq(connection_status: :unknown_failure, nodes: expected_nodes) }
|
||||
it { is_expected.to eq(**connection_status, **expected_nodes) }
|
||||
|
||||
it 'notifies Sentry' do
|
||||
expect(Gitlab::ErrorTracking).to receive(:track_exception)
|
||||
|
|
|
|||
|
|
@@ -17,6 +17,7 @@ RSpec.describe 'Git LFS API and storage' do
'X-Sendfile-Type' => sendfile
}.compact
end
let(:include_workhorse_jwt_header) { true }
let(:authorization) { }
let(:sendfile) { }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }

@@ -1075,14 +1076,24 @@ RSpec.describe 'Git LFS API and storage' do
end
end

context 'invalid tempfiles' do
context 'without the lfs object' do
before do
lfs_object.destroy
end

it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('../bar', with_tempfile: true)
expect(response).to have_gitlab_http_status(:forbidden)
expect(response).to have_gitlab_http_status(:bad_request)
end

context 'not sending the workhorse jwt header' do
let(:include_workhorse_jwt_header) { false }

it 'rejects the request' do
put_finalize(with_tempfile: true)

expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
end
end
end

@@ -1308,7 +1319,8 @@ RSpec.describe 'Git LFS API and storage' do
method: :put,
file_key: :file,
params: args.merge(file: uploaded_file),
headers: finalize_headers
headers: finalize_headers,
send_rewritten_field: include_workhorse_jwt_header
)
end
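In the middle hunk, the path-traversal case now expects :bad_request rather than :forbidden, and a new context drops the Workhorse JWT header to assert :unprocessable_entity. As a rough illustration of the kind of filename check such a test exercises (a hypothetical helper, not the application code under test):

    # Hypothetical check: a tempfile name must be a bare basename with no
    # path components, so 'abc123' passes while '../bar' is rejected.
    def safe_tempfile_name?(name)
      name == File.basename(name) && !name.include?('..')
    end

    safe_tempfile_name?('abc123')  # => true
    safe_tempfile_name?('../bar')  # => false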
@@ -0,0 +1,35 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe ClusterErrorEntity do
describe '#as_json' do
let(:cluster) { create(:cluster, :provided_by_user, :group) }

subject { described_class.new(cluster).as_json }

context 'when connection_error is present' do
before do
allow(cluster).to receive(:connection_error).and_return(:connection_error)
end

it { is_expected.to eq({ connection_error: :connection_error, metrics_connection_error: nil, node_connection_error: nil }) }
end

context 'when metrics_connection_error is present' do
before do
allow(cluster).to receive(:metrics_connection_error).and_return(:http_error)
end

it { is_expected.to eq({ connection_error: nil, metrics_connection_error: :http_error, node_connection_error: nil }) }
end

context 'when node_connection_error is present' do
before do
allow(cluster).to receive(:node_connection_error).and_return(:unknown_error)
end

it { is_expected.to eq({ connection_error: nil, metrics_connection_error: nil, node_connection_error: :unknown_error }) }
end
end
end
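This new spec drives ClusterErrorEntity#as_json, which serializes three nullable error fields read from the cluster. A plausible shape for such an entity, sketched here with Grape::Entity (an assumption about the implementation style, not the actual GitLab class):

    require 'grape-entity'

    # Sketch only: exposes the three error attributes the spec asserts on,
    # each of which may be nil when no error of that kind was recorded.
    class ClusterErrorEntitySketch < Grape::Entity
      expose :connection_error
      expose :metrics_connection_error
      expose :node_connection_error
    end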
@@ -14,6 +14,7 @@ RSpec.describe ClusterSerializer do
:enabled,
:environment_scope,
:gitlab_managed_apps_logs_path,
:kubernetes_errors,
:name,
:nodes,
:path,
@@ -13,8 +13,9 @@ RSpec.describe AuthorizedProjectUpdate::ProjectGroupLinkCreateService do
let_it_be(:project) { create(:project, :private, group: create(:group, :private)) }

let(:access_level) { Gitlab::Access::MAINTAINER }
let(:group_access) { nil }

subject(:service) { described_class.new(project, group) }
subject(:service) { described_class.new(project, group, group_access) }

describe '#perform' do
context 'direct group members' do

@@ -54,6 +55,26 @@ RSpec.describe AuthorizedProjectUpdate::ProjectGroupLinkCreateService do
end
end

context 'with group_access' do
let(:group_access) { Gitlab::Access::REPORTER }

before do
create(:group_member, access_level: access_level, group: group_parent, user: parent_group_user)
ProjectAuthorization.delete_all
end

it 'creates project authorization' do
expect { service.execute }.to(
change { ProjectAuthorization.count }.from(0).to(1))

project_authorization = ProjectAuthorization.where(
project_id: project.id,
user_id: parent_group_user.id,
access_level: group_access)
expect(project_authorization).to exist
end
end

context 'membership overrides' do
before do
create(:group_member, access_level: Gitlab::Access::REPORTER, group: group_parent, user: group_user)
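The updated subject passes a third group_access argument to the service while keeping the old two-argument form in the diff for comparison. A minimal sketch of how an optional trailing parameter keeps existing callers working (illustrative names; not the GitLab service itself):

    # Sketch: the new argument defaults to nil, so old call sites are unchanged
    # while new ones can cap the access level granted by the link.
    class LinkServiceSketch
      def initialize(project, group, group_access = nil)
        @project      = project
        @group        = group
        @group_access = group_access
      end
    end

    LinkServiceSketch.new(:project, :group)      # existing callers
    LinkServiceSketch.new(:project, :group, 20)  # callers passing an access level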
@@ -414,44 +414,117 @@ RSpec.describe Groups::TransferService do
end

context 'when transferring a group with nested groups and projects' do
let!(:group) { create(:group, :public) }
let(:subgroup1) { create(:group, :private, parent: group) }
let!(:project1) { create(:project, :repository, :private, namespace: group) }
let!(:subgroup1) { create(:group, :private, parent: group) }
let!(:nested_subgroup) { create(:group, :private, parent: subgroup1) }
let!(:nested_project) { create(:project, :repository, :private, namespace: subgroup1) }

before do
TestEnv.clean_test_path
create(:group_member, :owner, group: new_parent_group, user: user)
transfer_service.execute(new_parent_group)
end

it 'updates subgroups path' do
new_base_path = "#{new_parent_group.path}/#{group.path}"
group.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
context 'updated paths' do
let(:group) { create(:group, :public) }

before do
transfer_service.execute(new_parent_group)
end

new_base_path = "#{new_parent_group.path}/#{group.path}/#{subgroup1.path}"
subgroup1.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
it 'updates subgroups path' do
new_base_path = "#{new_parent_group.path}/#{group.path}"
group.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
end

new_base_path = "#{new_parent_group.path}/#{group.path}/#{subgroup1.path}"
subgroup1.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
end
end

it 'updates projects path' do
new_parent_path = "#{new_parent_group.path}/#{group.path}"
subgroup1.projects.each do |project|
project_full_path = "#{new_parent_path}/#{project.namespace.path}/#{project.name}"
expect(project.full_path).to eq(project_full_path)
end
end

it 'creates redirect for the subgroups and projects' do
expect(group.redirect_routes.count).to eq(1)
expect(project1.redirect_routes.count).to eq(1)
expect(subgroup1.redirect_routes.count).to eq(1)
expect(nested_subgroup.redirect_routes.count).to eq(1)
expect(nested_project.redirect_routes.count).to eq(1)
end
end

it 'updates projects path' do
new_parent_path = "#{new_parent_group.path}/#{group.path}"
subgroup1.projects.each do |project|
project_full_path = "#{new_parent_path}/#{project.namespace.path}/#{project.name}"
expect(project.full_path).to eq(project_full_path)
end
end

context 'resets project authorizations' do
let(:old_parent_group) { create(:group) }
let(:group) { create(:group, :private, parent: old_parent_group) }
let(:new_group_member) { create(:user) }
let(:old_group_member) { create(:user) }

it 'creates redirect for the subgroups and projects' do
expect(group.redirect_routes.count).to eq(1)
expect(project1.redirect_routes.count).to eq(1)
expect(subgroup1.redirect_routes.count).to eq(1)
expect(nested_subgroup.redirect_routes.count).to eq(1)
expect(nested_project.redirect_routes.count).to eq(1)
before do
new_parent_group.add_maintainer(new_group_member)
old_parent_group.add_maintainer(old_group_member)
group.refresh_members_authorized_projects
end

it 'removes old project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: project1.id, user_id: old_group_member.id).size
}.from(1).to(0)
end

it 'adds new project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: project1.id, user_id: new_group_member.id).size
}.from(0).to(1)
end

it 'performs authorizations job immediately' do
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_inline)

transfer_service.execute(new_parent_group)
end

context 'for nested projects' do
it 'removes old project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: nested_project.id, user_id: old_group_member.id).size
}.from(1).to(0)
end

it 'adds new project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: nested_project.id, user_id: new_group_member.id).size
}.from(0).to(1)
end
end

context 'for groups with many members' do
before do
11.times do
new_parent_group.add_maintainer(create(:user))
end
end

it 'adds new project authorizations for the user which makes a transfer' do
transfer_service.execute(new_parent_group)

expect(ProjectAuthorization.where(project_id: project1.id, user_id: user.id).size).to eq(1)
expect(ProjectAuthorization.where(project_id: nested_project.id, user_id: user.id).size).to eq(1)
end

it 'schedules authorizations job' do
expect(AuthorizedProjectsWorker).to receive(:bulk_perform_async)
.with(array_including(new_parent_group.members_with_parents.pluck(:user_id).map {|id| [id, anything] }))

transfer_service.execute(new_parent_group)
end
end
end
end
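The new contexts assert that transferring a group drops ProjectAuthorization rows for members of the old parent, adds rows for members of the new parent, refreshes inline for small groups, and falls back to bulk_perform_async once the member count grows (11 extra maintainers in the example). A rough sketch of that inline-versus-async decision; the threshold of 10 is an assumption inferred from the example, not a value taken from the GitLab code:

    # Illustrative only: pick a refresh strategy based on how many users need
    # their project authorizations recalculated after the transfer.
    INLINE_REFRESH_LIMIT = 10

    def refresh_strategy(user_ids)
      user_ids.size > INLINE_REFRESH_LIMIT ? :bulk_perform_async : :bulk_perform_inline
    end

    refresh_strategy((1..3).to_a)  # => :bulk_perform_inline
    refresh_strategy((1..12).to_a) # => :bulk_perform_async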
@@ -6,9 +6,10 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
let_it_be(:user) { create :user }
let_it_be(:group) { create :group }
let_it_be(:project) { create :project }
let(:group_access) { Gitlab::Access::DEVELOPER }
let(:opts) do
{
link_group_access: '30',
link_group_access: group_access,
expires_at: nil
}
end

@@ -49,7 +50,9 @@ RSpec.describe Projects::GroupLinks::CreateService, '#execute' do
receive(:bulk_perform_async)
)
expect(AuthorizedProjectUpdate::ProjectGroupLinkCreateWorker).to(
receive(:perform_async).and_call_original
receive(:perform_async)
.with(project.id, group.id, group_access)
.and_call_original
)
expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to(
receive(:bulk_perform_in)
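The first hunk above swaps the string literal '30' for the named Gitlab::Access::DEVELOPER constant, and the worker expectation now pins the arguments to (project.id, group.id, group_access). For reference, the standard GitLab role values these constants map to, reproduced here as a self-contained sketch:

    # Mirror of GitLab's Gitlab::Access role values (GUEST through OWNER),
    # shown only to make the '30' -> DEVELOPER substitution concrete.
    module AccessLevelsSketch
      GUEST      = 10
      REPORTER   = 20
      DEVELOPER  = 30
      MAINTAINER = 40
      OWNER      = 50
    end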