Add latest changes from gitlab-org/gitlab@master

parent ed899a6a1e
commit a2c1e6d328
@@ -167,19 +167,6 @@ Layout/ArgumentAlignment:
    - 'ee/spec/requests/api/deployments_spec.rb'
    - 'ee/spec/requests/api/dora/metrics_spec.rb'
    - 'ee/spec/requests/api/epics_spec.rb'
    - 'ee/spec/requests/api/namespaces_spec.rb'
    - 'ee/spec/requests/api/project_clusters_spec.rb'
    - 'ee/spec/requests/api/project_push_rule_spec.rb'
    - 'ee/spec/requests/api/projects_spec.rb'
    - 'ee/spec/requests/api/protected_environments_spec.rb'
    - 'ee/spec/requests/api/provider_identity_spec.rb'
    - 'ee/spec/services/gitlab_subscriptions/reconciliations/check_seat_usage_alerts_eligibility_service_spec.rb'
    - 'ee/spec/services/groups/compliance_report_csv_service_spec.rb'
    - 'ee/spec/services/groups/mark_for_deletion_service_spec.rb'
    - 'ee/spec/services/groups/restore_service_spec.rb'
    - 'ee/spec/services/issue_feature_flags/list_service_spec.rb'
    - 'ee/spec/services/merge_request_approval_settings/update_service_spec.rb'
    - 'ee/spec/services/protected_environments/create_service_spec.rb'
    - 'lib/api/admin/plan_limits.rb'
    - 'lib/api/alert_management_alerts.rb'
    - 'lib/api/api.rb'

@@ -12,15 +12,6 @@ Lint/AmbiguousOperatorPrecedence:
    - 'config/initializers/1_settings.rb'
    - 'config/initializers/carrierwave_s3_encryption_headers_patch.rb'
    - 'config/initializers/kaminari_active_record_relation_methods_with_limit.rb'
    - 'danger/roulette/Dangerfile'
    - 'ee/app/models/geo/upload_registry.rb'
    - 'ee/app/models/iterations/cadence.rb'
    - 'ee/app/models/license.rb'
    - 'ee/app/policies/ee/issuable_policy.rb'
    - 'ee/app/services/boards/epics/position_create_service.rb'
    - 'ee/app/services/geo/registry_consistency_service.rb'
    - 'ee/app/services/vulnerabilities/create_service.rb'
    - 'ee/lib/gitlab/expiring_subscription_message.rb'
    - 'spec/lib/gitlab/conan_token_spec.rb'
    - 'spec/lib/gitlab/database/background_migration/batched_job_spec.rb'
    - 'spec/lib/gitlab/database/batch_count_spec.rb'
@@ -1,13 +1,22 @@
<script>
import { GlDrawer, GlButton } from '@gitlab/ui';
import { GlDrawer, GlBadge, GlSprintf, GlButton, GlIcon } from '@gitlab/ui';
import { DRAWER_Z_INDEX } from '~/lib/utils/constants';
import { getContentWrapperHeight } from '~/lib/utils/dom_utils';
import { helpPagePath } from '~/helpers/help_page_helper';
import MembersTableCell from 'ee_else_ce/members/components/table/members_table_cell.vue';
import { ACCESS_LEVEL_LABELS } from '~/access_level/constants';
import MemberAvatar from './member_avatar.vue';

export default {
  components: { MemberAvatar, MembersTableCell, GlDrawer, GlButton },
  components: {
    MemberAvatar,
    MembersTableCell,
    GlDrawer,
    GlBadge,
    GlSprintf,
    GlButton,
    GlIcon,
  },
  props: {
    member: {
      type: Object,

@@ -19,9 +28,18 @@ export default {
    viewPermissionsDocPath() {
      return helpPagePath('user/permissions');
    },
    customRole() {
      const customRoleId = this.member.accessLevel.memberRoleId;

      return this.member.customRoles?.find(({ memberRoleId }) => memberRoleId === customRoleId);
    },
    customRolePermissions() {
      return this.customRole?.permissions || [];
    },
  },
  getContentWrapperHeight,
  DRAWER_Z_INDEX,
  ACCESS_LEVEL_LABELS,
};
</script>

@@ -54,12 +72,33 @@ export default {
    <dl>
      <dt class="gl-mb-3" data-testid="role-header">{{ s__('MemberRole|Role') }}</dt>
      <dd data-testid="role-value">{{ member.accessLevel.stringValue }}</dd>
      <dd data-testid="role-value">
        {{ member.accessLevel.stringValue }}
        <gl-badge v-if="customRole" size="sm" class="gl-ml-2">
          {{ s__('MemberRole|Custom role') }}
        </gl-badge>
      </dd>

      <template v-if="customRole">
        <dt class="gl-mt-6 gl-mb-3" data-testid="description-header">
          {{ s__('MemberRole|Description') }}
        </dt>
        <dd data-testid="description-value">
          {{ member.accessLevel.description }}
        </dd>
      </template>

      <dt class="gl-mt-6 gl-mb-3" data-testid="permissions-header">
        {{ s__('MemberRole|Permissions') }}
      </dt>
      <dd>
      <dd class="gl-display-flex gl-mb-5">
        <span v-if="customRole" class="gl-mr-3" data-testid="base-role">
          <gl-sprintf :message="s__('MemberRole|Base role: %{role}')">
            <template #role>
              {{ $options.ACCESS_LEVEL_LABELS[customRole.baseAccessLevel] }}
            </template>
          </gl-sprintf>
        </span>
        <gl-button
          :href="viewPermissionsDocPath"
          icon="external-link"

@@ -69,6 +108,23 @@ export default {
          {{ s__('MemberRole|View permissions') }}
        </gl-button>
      </dd>

      <div
        v-for="permission in customRolePermissions"
        :key="permission.name"
        class="gl-display-flex"
        data-testid="permission"
      >
        <gl-icon name="check" class="gl-flex-shrink-0" />
        <div class="gl-mx-3">
          <span data-testid="permission-name">
            {{ permission.name }}
          </span>
          <p class="gl-mt-2 gl-text-secondary" data-testid="permission-description">
            {{ permission.description }}
          </p>
        </div>
      </div>
    </dl>
  </div>
</gl-drawer>
@@ -164,7 +164,6 @@ const SUPPORTED_TRACING_FILTERS = {
  durationMs: ['>', '<'],
  operation: ['=', '!='],
  service: ['=', '!='],
  period: ['='],
  traceId: ['=', '!='],
  attribute: ['='],
  status: ['=', '!='],

@@ -181,27 +180,8 @@ const TRACING_FILTER_TO_QUERY_PARAM = {
  traceId: 'trace_id',
  status: 'status',
  // `attribute` is handled separately, see `handleAttributeFilter` method
  // `period` is handled separately, see `handleTracingPeriodFilter` method
};

function handleTracingPeriodFilter(rawValue, filterName, filterParams) {
  if (rawValue.trim().indexOf(' ') < 0) {
    filterParams.append(filterName, rawValue.trim());
    return;
  }

  const dateParts = rawValue.split(' - ');
  if (dateParts.length === 2) {
    const [start, end] = dateParts;
    const startDate = new Date(start);
    const endDate = new Date(end);
    if (isValidDate(startDate) && isValidDate(endDate)) {
      filterParams.append('start_time', startDate.toISOString());
      filterParams.append('end_time', endDate.toISOString());
    }
  }
}

/**
 * Builds URLSearchParams from a filter object of type { [filterName]: undefined | null | Array<{operator: String, value: any} }
 * e.g:

@@ -227,8 +207,6 @@ function addTracingAttributesFiltersToQueryParams(filterObj, filterParams) {
  validFilters.forEach(({ operator, value: rawValue }) => {
    if (filterName === 'attribute') {
      handleAttributeFilter(rawValue, operator, filterParams, 'attr_name', 'attr_value');
    } else if (filterName === 'period') {
      handleTracingPeriodFilter(rawValue, filterName, filterParams);
    } else {
      const paramName = getFilterParamName(filterName, operator, TRACING_FILTER_TO_QUERY_PARAM);
      let value = rawValue;

@@ -266,10 +244,14 @@ async function fetchTraces(
) {
  const params = new URLSearchParams();

  const { attributes } = filters;
  const { attributes, dateRange } = filters;

  if (attributes) {
    addTracingAttributesFiltersToQueryParams(attributes, params);
  }
  if (dateRange) {
    addDateRangeFilterToQueryParams(dateRange, params);
  }

  if (pageToken) {
    params.append('page_token', pageToken);

@@ -300,10 +282,14 @@ async function fetchTraces(
async function fetchTracesAnalytics(tracingAnalyticsUrl, { filters = {}, abortController } = {}) {
  const params = new URLSearchParams();

  const { attributes } = filters;
  const { attributes, dateRange } = filters;

  if (attributes) {
    addTracingAttributesFiltersToQueryParams(attributes, params);
  }
  if (dateRange) {
    addDateRangeFilterToQueryParams(dateRange, params);
  }

  try {
    const { data } = await axios.get(tracingAnalyticsUrl, {
@@ -89,7 +89,10 @@ export default {
</script>

<template>
  <div class="gl-display-flex gl-flex-direction-column gl-lg-flex-direction-row gl-gap-3">
  <div
    class="gl-display-flex gl-flex-direction-column gl-lg-flex-direction-row gl-gap-3"
    data-testid="date-range-filter"
  >
    <date-ranges-dropdown
      :selected="dateRange.value"
      :date-range-options="dateRangeOptions"
@@ -16,22 +16,22 @@ export const linkedIssueTypesTextMap = {
export const autoCompleteTextMap = {
  true: {
    [TYPE_ISSUE]: sprintf(
      __(' or %{emphasisStart}#issue id%{emphasisEnd}'),
      __(' or %{emphasisStart}#issue ID%{emphasisEnd}'),
      { emphasisStart: '<', emphasisEnd: '>' },
      false,
    ),
    [TYPE_INCIDENT]: sprintf(
      __(' or %{emphasisStart}#id%{emphasisEnd}'),
      __(' or %{emphasisStart}#ID%{emphasisEnd}'),
      { emphasisStart: '<', emphasisEnd: '>' },
      false,
    ),
    [TYPE_EPIC]: sprintf(
      __(' or %{emphasisStart}&epic id%{emphasisEnd}'),
      __(' or %{emphasisStart}&epic ID%{emphasisEnd}'),
      { emphasisStart: '<', emphasisEnd: '>' },
      false,
    ),
    [TYPE_MERGE_REQUEST]: sprintf(
      __(' or %{emphasisStart}!merge request id%{emphasisEnd}'),
      __(' or %{emphasisStart}!merge request ID%{emphasisEnd}'),
      { emphasisStart: '<', emphasisEnd: '>' },
      false,
    ),

@@ -44,15 +44,15 @@ export const autoCompleteTextMap = {
};

export const inputPlaceholderTextMap = {
  [TYPE_ISSUE]: __('Paste issue link'),
  [TYPE_INCIDENT]: __('Paste link'),
  [TYPE_EPIC]: __('Paste epic link'),
  [TYPE_ISSUE]: __('Enter issue URL'),
  [TYPE_INCIDENT]: __('Enter URL'),
  [TYPE_EPIC]: __('Enter epic URL'),
  [TYPE_MERGE_REQUEST]: __('Enter merge request URLs'),
};

export const inputPlaceholderConfidentialTextMap = {
  [TYPE_ISSUE]: __('Paste confidential issue link'),
  [TYPE_EPIC]: __('Paste confidential epic link'),
  [TYPE_ISSUE]: __('Enter confidential issue URL'),
  [TYPE_EPIC]: __('Enter confidential epic URL'),
  [TYPE_MERGE_REQUEST]: __('Enter merge request URLs'),
};
@@ -62,7 +62,7 @@ def note_for_spin_role(spin, role, category)
    spin.public_send(role)&.markdown_name(author: roulette.team_mr_author) # rubocop:disable GitlabSecurity/PublicSend
  end

  note || template % { role: role }
  note || (template % { role: role })
end

def markdown_row_for_spin(category, spin)
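The Dangerfile change above mirrors the Lint/AmbiguousOperatorPrecedence cleanup in the rubocop TODO hunk earlier: Ruby's format operator `%` already binds tighter than `||`, so the added parentheses change no behavior, they only make the precedence explicit. A minimal Ruby sketch of the pattern (values invented for illustration):

  # `%` (format) binds tighter than `||`, so both lines are equivalent;
  # the parenthesized form is the one the cop prefers.
  template = 'No %{role} available'
  note = nil

  implicit = note || template % { role: 'reviewer' }
  explicit = note || (template % { role: 'reviewer' })

  puts implicit # => "No reviewer available"
  puts explicit # => "No reviewer available"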
@@ -8,3 +8,4 @@ description: User mentions in commit messages
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/19009
milestone: '12.6'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/444222

@@ -8,3 +8,4 @@ description: Stores diff notes positions
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28113
milestone: '13.0'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/444222

@@ -8,3 +8,4 @@ description: Store commit user information for merge request diffs
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/63669
milestone: '14.1'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/444232

@@ -8,3 +8,4 @@ description: Persisted truncated note diffs
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/18991
milestone: '11.0'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/444222

@@ -12,3 +12,4 @@ description: Store any extra metadata for notes
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/117149
milestone: '16.0'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/444222

@@ -17,7 +17,8 @@ feature_categories:
- service_desk
- source_code_management
- team_planning
description: The object at the core of comments, discussions and system notes shown on issues, MRs and epics
description: The object at the core of comments, discussions and system notes shown
  on issues, MRs and epics
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/9ba1224867665844b117fa037e1465bb706b3685
milestone: "<6.0"
gitlab_schema: gitlab_main_cell

@@ -27,3 +28,4 @@ allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/444222

@@ -7,4 +7,12 @@ feature_categories:
description: Settings related to Organizations
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123380
milestone: '16.2'
gitlab_schema: gitlab_main
gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
sharding_key:
  organization_id: organizations

@@ -8,3 +8,5 @@ description: User who has joined an organization
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123804
milestone: '16.2'
gitlab_schema: gitlab_main_cell
sharding_key:
  organization_id: organizations

@@ -8,3 +8,4 @@ description: Storing code suggestions within notes
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/8656
milestone: '11.6'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/444222

@@ -10,7 +10,9 @@ feature_categories:
- groups_and_projects
- source_code_management
- team_planning
description: For tracking blob metadata. Single table inheritance is used to relate this table to many other tables.
description: For tracking blob metadata. Single table inheritance is used to relate
  this table to many other tables.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/commit/4c622b71fd284058deee483bf0009f8179b792bc
milestone: '9.0'
gitlab_schema: gitlab_main_cell
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/398199
@@ -164,7 +164,7 @@ sequenceDiagram
| GitLab Duo Chat | Anthropic Claude-2 <br/> Vertex AI Codey textembedding-gecko | Not planned | Not planned |
| Code Completion | Vertex AI Codey code-gecko | ✅ | ✅ |
| Code Generation | Anthropic Claude-3 | ✅ | ✅ |
| Git Suggestions | Vertex AI Codey codechat-bison | Not planned | Not planned |
| GitLab Duo for the CLI | Vertex AI Codey codechat-bison | Not planned | Not planned |
| Discussion Summary | Vertex AI Codey text-bison | Not planned | Not planned |
| Issue Description Generation | Anthropic Claude-2 | Not planned | Not planned |
| Test Generation | Anthropic Claude-2 | Not planned | Not planned |
@@ -50,7 +50,6 @@ glab mr merge
- [`glab alias`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/alias)
- [`glab api`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/api)
- [`glab ask`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/ask)
- [`glab auth`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/auth)
- [`glab changelog`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/changelog)
- [`glab check-update`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/check-update)

@@ -58,6 +57,7 @@ glab mr merge
- [`glab cluster`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/cluster)
- [`glab completion`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/completion)
- [`glab config`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/config)
- [`glab duo`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/duo)
- [`glab incident`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/incident)
- [`glab issue`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/issue)
- [`glab label`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/label)

@@ -70,20 +70,22 @@ glab mr merge
- [`glab user`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/user)
- [`glab variable`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/variable)

## Git suggestions
## GitLab Duo for the CLI

DETAILS:
**Tier:** Freely available for Ultimate for a limited time. In the future, will require [GitLab Duo Enterprise](../../subscriptions/subscription-add-ons.md).
**Offering:** GitLab.com

The GitLab CLI includes Git suggestions, powered by [GitLab Duo](../../user/ai_features.md).
The GitLab CLI includes features powered by [GitLab Duo](../../user/ai_features.md). These include:

- [`glab duo ask`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/duo/ask)

To ask questions about `git` commands while you work, type:

- [`glab ask`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/ask)
- [`glab duo ask`](https://gitlab.com/gitlab-org/cli/-/tree/main/docs/source/duo/ask)

The `glab ask` command can help you remember a command you forgot,
or provide suggestions on how to run commands to perform other tasks.
The `glab duo ask` command can help you remember a `git` command you forgot,
or provide suggestions on how to run `git` commands to perform other tasks.

## Install the CLI
@@ -124,15 +124,15 @@ DETAILS:

## Experimental features

### Git suggestions
### GitLab Duo for the CLI

DETAILS:
**Tier:** Freely available for Ultimate for a limited time. In the future, will require [GitLab Duo Enterprise](../subscriptions/subscription-add-ons.md).
**Offering:** GitLab.com

- Helps you discover or recall Git commands when and where you need them.
- `glab duo ask` helps you discover or recall `git` commands when and where you need them.
- LLM: Vertex AI Codey [`codechat-bison`](https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/code-chat)
- [View documentation](../editor_extensions/gitlab_cli/index.md#git-suggestions).
- [View documentation](../editor_extensions/gitlab_cli/index.md#gitlab-duo-for-the-cli).

### Discussion summary
@@ -59,16 +59,16 @@ msgstr[1] ""
msgid " or "
msgstr ""

msgid " or %{emphasisStart}!merge request id%{emphasisEnd}"
msgid " or %{emphasisStart}!merge request ID%{emphasisEnd}"
msgstr ""

msgid " or %{emphasisStart}#id%{emphasisEnd}"
msgid " or %{emphasisStart}#ID%{emphasisEnd}"
msgstr ""

msgid " or %{emphasisStart}#issue id%{emphasisEnd}"
msgid " or %{emphasisStart}#issue ID%{emphasisEnd}"
msgstr ""

msgid " or %{emphasisStart}&epic id%{emphasisEnd}"
msgid " or %{emphasisStart}&epic ID%{emphasisEnd}"
msgstr ""

msgid " or references"

@@ -19994,6 +19994,9 @@ msgstr ""
msgid "Enter %{weights_link_start}weights%{weights_link_end} for storages for new repositories. Configured storages appear below."
msgstr ""

msgid "Enter URL"
msgstr ""

msgid "Enter a URL for your custom emoji"
msgstr ""

@@ -20024,9 +20027,21 @@ msgstr ""
msgid "Enter at least three characters to search."
msgstr ""

msgid "Enter confidential epic URL"
msgstr ""

msgid "Enter confidential issue URL"
msgstr ""

msgid "Enter epic URL"
msgstr ""

msgid "Enter in your Bitbucket Server URL and personal access token below"
msgstr ""

msgid "Enter issue URL"
msgstr ""

msgid "Enter license key"
msgstr ""

@@ -31753,6 +31768,9 @@ msgstr ""
msgid "MemberRole|Base role"
msgstr ""

msgid "MemberRole|Base role: %{role}"
msgstr ""

msgid "MemberRole|Cannot create a member role with no enabled permissions"
msgstr ""

@@ -37425,24 +37443,9 @@ msgstr ""
msgid "Paste a public key here. %{link_start}How do I generate it?%{link_end}"
msgstr ""

msgid "Paste confidential epic link"
msgstr ""

msgid "Paste confidential issue link"
msgstr ""

msgid "Paste epic link"
msgstr ""

msgid "Paste group path (i.e. gitlab-org) or project path (i.e. gitlab-org/gitlab)"
msgstr ""

msgid "Paste issue link"
msgstr ""

msgid "Paste link"
msgstr ""

msgid "Paste project path (i.e. gitlab-org/gitlab)"
msgstr ""
@@ -9,16 +9,8 @@ module QA

  let(:branch) { "push-options-test-#{SecureRandom.hex(8)}" }
  let(:title) { "MR push options test #{SecureRandom.hex(8)}" }

  let(:project) { create(:project, :with_readme, name: 'merge-request-push-options') }

  let!(:runner) do
    Resource::ProjectRunner.fabricate! do |runner|
      runner.project = project
      runner.name = "runner-for-#{project.name}"
      runner.tags = ["runner-for-#{project.name}"]
    end
  end
  let!(:runner) { create(:project_runner, project: project, name: "runner-for-#{project.name}", tags: ["runner-for-#{project.name}"]) }

  after do |example|
    runner.remove_via_api!
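This and the following QA spec hunks all apply the same refactor: the verbose `Resource::ProjectRunner.fabricate!` block is collapsed into a single FactoryBot-style `create` call. A minimal sketch of the before/after shape, using the factory names the hunks themselves introduce (the surrounding spec scaffolding here is hypothetical):

  module QA
    RSpec.describe 'Example', :runner do
      let(:executor) { "qa-runner-#{Time.now.to_i}" }
      let(:project) { create(:project, name: 'example-project') }

      # Before: attributes assigned one by one inside a fabricate! block.
      let!(:verbose_runner) do
        Resource::ProjectRunner.fabricate! do |runner|
          runner.project = project
          runner.name = executor
          runner.tags = [executor]
        end
      end

      # After: the same resource expressed as a one-line factory call.
      let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }
    end
  end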
@@ -12,19 +12,17 @@ module QA
  let!(:downstream_project) { create(:project, name: 'downstream-project') }

  let!(:upstream_project_runner) do
    Resource::ProjectRunner.fabricate! do |runner|
      runner.project = upstream_project
      runner.name = executor
      runner.tags = [executor]
    end
    create(:project_runner,
      project: upstream_project,
      name: executor,
      tags: [executor])
  end

  let!(:downstream_project_runner) do
    Resource::ProjectRunner.fabricate! do |runner|
      runner.project = downstream_project
      runner.name = "#{executor}-downstream"
      runner.tags = [executor]
    end
    create(:project_runner,
      project: downstream_project,
      name: "#{executor}-downstream",
      tags: [executor])
  end

  let(:upstream_project_files) do
@@ -5,14 +5,7 @@ module QA
  describe 'Pipeline with project file variables' do
    let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
    let(:project) { create(:project, name: 'project-with-file-variables') }

    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    let(:add_ci_file) do
      create(:commit, project: project, commit_message: 'Add .gitlab-ci.yml', actions: [

@@ -11,17 +11,14 @@ module QA

  describe 'SaaS Container Registry API' do
    let(:api_client) { Runtime::API::Client.new(:gitlab) }
    let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }

    let(:project) do
      create(:project, name: 'project-with-registry-api', template_name: 'express', api_client: api_client)
    end

    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = "runner-for-#{project.name}"
        runner.tags = ["runner-for-#{project.name}"]
      end
      create(:project_runner, project: project, name: executor, tags: [executor], executor: :docker)
    end

    let!(:project_access_token) { create(:project_access_token, project: project) }

@@ -29,14 +29,7 @@ module QA
    let(:executor) { "qa-runner-#{Time.now.to_i}" }
    let(:emails) { %w[foo@bar.com baz@buzz.com] }
    let(:project) { create(:project, name: 'pipeline-status-project') }

    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    let(:mail_hog) { Vendor::MailHog::API.new }

@@ -17,13 +17,7 @@ module QA
      create(:project, :with_readme, name: 'project-to-test-component')
    end

    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = test_project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: test_project, name: executor, tags: [executor]) }

    let(:component_content) do
      <<~YAML

@@ -9,13 +9,7 @@ module QA
  describe 'Unlocking job artifacts across parent-child pipelines' do
    let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
    let(:project) { create(:project, name: 'unlock-job-artifacts-parent-child-project') }
    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    before do
      Flow::Login.sign_in

@@ -5,14 +5,7 @@ module QA
  describe "Unlocking job artifacts across pipelines" do
    let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
    let(:project) { create(:project, name: 'unlock-job-artifacts-project') }

    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    before do
      Flow::Login.sign_in
@@ -7,13 +7,7 @@ module QA
    let(:pipeline_job_name) { 'customizable-variable' }
    let(:variable_custom_value) { 'Custom Foo' }
    let(:project) { create(:project, name: 'project-with-customizable-variable-pipeline') }
    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    let!(:commit) do
      create(:commit, project: project, commit_message: 'Add .gitlab-ci.yml', actions: [

@@ -6,14 +6,7 @@ module QA
    let(:executor) { "qa-runner-#{Time.now.to_i}" }
    let(:pipeline_job_name) { 'rspec' }
    let(:project) { create(:project, name: 'project-with-raw-variable-pipeline') }

    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    let!(:commit_ci_file) do
      create(:commit, project: project, commit_message: 'Add .gitlab-ci.yml', actions: [

@@ -9,14 +9,7 @@ module QA

    let(:project) { create(:project, name: 'project-with-pipeline-1') }
    let(:other_project) { create(:project, name: 'project-with-pipeline-2') }

    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    before do
      Flow::Login.sign_in

@@ -9,13 +9,7 @@ module QA
    context 'when pipeline is blocked' do
      let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
      let(:project) { create(:project, name: 'project-with-blocked-pipeline') }
      let!(:runner) do
        Resource::ProjectRunner.fabricate! do |runner|
          runner.project = project
          runner.name = executor
          runner.tags = [executor]
        end
      end
      let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

      let!(:ci_file) do
        create(:commit, project: project, commit_message: 'Add .gitlab-ci.yml', actions: [

@@ -8,14 +8,7 @@ module QA
    let(:group) { create(:group) }
    let(:upstream_project) { create(:project, group: group, name: 'upstream-project-with-bridge') }
    let(:downstream_project) { create(:project, group: group, name: 'downstream-project-with-bridge') }

    let!(:runner) do
      Resource::GroupRunner.fabricate! do |runner|
        runner.name = executor
        runner.tags = [executor]
        runner.group = group
      end
    end
    let!(:runner) { create(:group_runner, group: group, name: executor, tags: [executor]) }

    before do
      Flow::Login.sign_in

@@ -7,12 +7,11 @@ module QA
    let(:job_name) { "test-job-#{pull_policies.join('-')}" }
    let(:project) { create(:project, name: 'pipeline-with-image-pull-policy') }
    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = runner_name
        runner.tags = [runner_name]
        runner.executor = :docker
      end
      create(:project_runner,
        project: project,
        name: runner_name,
        tags: [runner_name],
        executor: :docker)
    end

    before do
@@ -5,13 +5,7 @@ module QA
  describe "Trigger child pipeline with 'when:manual'" do
    let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
    let(:project) { create(:project, name: 'project-with-pipeline') }
    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    before do
      Flow::Login.sign_in

@@ -5,13 +5,7 @@ module QA
  describe 'Trigger matrix' do
    let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(number: 8)}" }
    let(:project) { create(:project, name: 'project-with-pipeline') }
    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.project = project
        runner.name = executor
        runner.tags = [executor]
      end
    end
    let!(:runner) { create(:project_runner, project: project, name: executor, tags: [executor]) }

    before do
      Flow::Login.sign_in

@@ -4,12 +4,7 @@ module QA
  RSpec.describe 'Verify', :runner, product_group: :runner do
    describe 'Group runner registration' do
      let(:executor) { "qa-runner-#{Time.now.to_i}" }

      let!(:runner) do
        Resource::GroupRunner.fabricate! do |runner|
          runner.name = executor
        end
      end
      let!(:runner) { create(:group_runner, name: executor) }

      after do
        runner.remove_via_api!

@@ -4,12 +4,7 @@ module QA
  RSpec.describe 'Verify', :runner, product_group: :runner do
    describe 'Runner registration' do
      let(:executor) { "qa-runner-#{Time.now.to_i}" }
      let!(:runner) do
        Resource::ProjectRunner.fabricate! do |runner|
          runner.name = executor
          runner.tags = ['e2e-test']
        end
      end
      let!(:runner) { create(:project_runner, name: executor, tags: ['e2e-test']) }

      after do
        runner.remove_via_api!

@@ -20,12 +20,11 @@ module QA
    end

    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.name = "qa-runner-#{Time.now.to_i}"
        runner.tags = ["runner-for-#{project.name}"]
        runner.executor = :docker
        runner.project = project
      end
      create(:project_runner,
        name: "qa-runner-#{Time.now.to_i}",
        tags: ["runner-for-#{project.name}"],
        executor: :docker,
        project: project)
    end

    let(:personal_access_token) { Runtime::Env.personal_access_token }

@@ -8,12 +8,11 @@ module QA

    let(:project) { create(:project, :private, name: 'dependency-proxy-project') }
    let!(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.name = "qa-runner-#{Time.now.to_i}"
        runner.tags = ["runner-for-#{project.name}"]
        runner.executor = :docker
        runner.project = project
      end
      create(:project_runner,
        name: "qa-runner-#{Time.now.to_i}",
        tags: ["runner-for-#{project.name}"],
        executor: :docker,
        project: project)
    end

    let(:group_deploy_token) do

@@ -19,12 +19,11 @@ module QA
    let(:package_type) { 'maven_gradle' }
    let(:project) { create(:project, :private, :with_readme, name: "#{package_type}_project") }
    let(:runner) do
      Resource::ProjectRunner.fabricate! do |runner|
        runner.name = "qa-runner-#{Time.now.to_i}"
        runner.tags = ["runner-for-#{project.name}"]
        runner.executor = :docker
        runner.project = project
      end
      create(:project_runner,
        name: "qa-runner-#{Time.now.to_i}",
        tags: ["runner-for-#{project.name}"],
        executor: :docker,
        project: project)
    end

    let(:gitlab_address_with_port) do

@@ -27,14 +27,7 @@ module QA
        description: 'Project for pipeline with variable defined via UI - Downstream',
        group: group)
    end

    let!(:runner) do
      Resource::GroupRunner.fabricate! do |runner|
        runner.group = group
        runner.name = random_string
        runner.tags = [random_string]
      end
    end
    let!(:runner) { create(:group_runner, group: group, name: random_string, tags: [random_string]) }

    before do
      Flow::Login.sign_in
@@ -0,0 +1,30 @@
# frozen_string_literal: true

require 'fog/google'

module QA
  module Support
    module GcsTools
      # GCS Client
      #
      # @return [Fog::Storage::Google]
      def gcs_client
        Fog::Storage::Google.new(
          google_project: ENV['QA_METRICS_GCS_PROJECT_ID'] || raise('Missing QA_METRICS_GCS_PROJECT_ID env variable'),
          **gcs_credentials)
      end

      # GCS Credentials
      #
      # @return [Hash]
      def gcs_credentials
        json_key = ENV['QA_METRICS_GCS_CREDS'] || raise(
          'QA_METRICS_GCS_CREDS env variable is required!'
        )
        return { google_json_key_location: json_key } if File.exist?(json_key)

        { google_json_key_string: json_key }
      end
    end
  end
end
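The new `GcsTools` module centralizes the Fog client construction that the migration class below previously built inline in `upload_to_gcs`. A hedged usage sketch, assuming the two env variables named above are set (the surrounding class is hypothetical):

  module QA
    module Tools
      class ExampleUploader
        include Support::GcsTools

        def upload(bucket, local_path, object_name)
          # gcs_client reads QA_METRICS_GCS_PROJECT_ID and QA_METRICS_GCS_CREDS,
          # raising early when either variable is missing.
          file = gcs_client.put_object(bucket, object_name, File.new(local_path, 'r'), force: true)
          QA::Runtime::Logger.info("Uploaded to gs://#{bucket}/#{file.name}")
        end
      end
    end
  end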
@@ -39,7 +39,9 @@ module QA
  ENV["QA_INFLUXDB_TOKEN"] || raise("Missing QA_INFLUXDB_TOKEN env variable"),
  bucket: INFLUX_TEST_METRICS_BUCKET,
  org: "gitlab-qa",
  precision: InfluxDB2::WritePrecision::NANOSECOND
  precision: InfluxDB2::WritePrecision::NANOSECOND,
  read_timeout: ENV["QA_INFLUXDB_TIMEOUT"].to_i || 10,
  open_timeout: ENV["QA_INFLUXDB_TIMEOUT"].to_i || 10
)
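One caveat worth flagging in the new timeout lines: `ENV[...].to_i || 10` never falls back to 10, because `nil.to_i` is `0` and `0` is truthy in Ruby, so an unset variable yields a timeout of 0 rather than 10 (the spec further down always stubs `QA_INFLUXDB_TIMEOUT`, which masks this). A sketch of the intended fallback, not part of the commit:

  # Unset env variable => 10, as presumably intended.
  timeout = (ENV["QA_INFLUXDB_TIMEOUT"] || 10).to_i
  # Versus the committed form, where an unset env variable => 0:
  committed = ENV["QA_INFLUXDB_TIMEOUT"].to_i || 10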
@@ -1,123 +1,52 @@
# frozen_string_literal: true

require 'csv'
require "fog/google"

module QA
  module Tools
    class MigrateInfluxDataToGcs
      include Support::GcsTools
      include Support::InfluxdbTools
      include Support::Repeater
      include Support::Retrier

      RETRY_BACK_OFF_DELAY = 30
      MAX_RETRY_ATTEMPTS = 3

      QA_METRICS_GCS_BUCKET_NAME = ENV['QA_METRICS_GCS_BUCKET_NAME'] ||
        raise('Missing QA_METRICS_GCS_BUCKET_NAME env variable')

      # Google Cloud Storage bucket from which Snowpipe would pull data into Snowflake
      QA_GCS_BUCKET_NAME = ENV["QA_GCS_BUCKET_NAME"] || raise("Missing QA_GCS_BUCKET_NAME env variable")
      QA_GCS_PROJECT_ID = ENV["QA_GCS_PROJECT_ID"] || raise("Missing QA_GCS_PROJECT_ID env variable")
      QA_GCS_JSON_KEY_FILE = ENV["QA_GCS_JSON_KEY_FILE"] || raise("Missing QA_GCS_JSON_KEY_FILE env variable")
      INFLUX_STATS_TYPE = %w[test-stats fabrication-stats].freeze
      INFLUX_BUCKETS = [Support::InfluxdbTools::INFLUX_TEST_METRICS_BUCKET,
        Support::InfluxdbTools::INFLUX_MAIN_TEST_METRICS_BUCKET].freeze
      TEST_STATS_FIELDS = %w[id testcase file_path name product_group stage job_id job_name
        job_url pipeline_id pipeline_url merge_request merge_request_iid smoke blocking quarantined
        retried retry_attempts run_time run_type status ui_fabrication api_fabrication total_fabrication].freeze
      FABRICATION_STATS_FIELDS = %w[timestamp resource fabrication_method http_method run_type
        merge_request fabrication_time info job_url].freeze
      INFLUX_BUCKETS = [INFLUX_TEST_METRICS_BUCKET, INFLUX_MAIN_TEST_METRICS_BUCKET].freeze

      def initialize(range)
        @range = range.to_i
      def initialize(_args)
        @retry_backoff = 0
      end

      # Run Influx Migrator
      # @param [Integer] the last x hours for which data is required
      #
      # @param [Hash] the arguments hash
      # @return [void]
      def self.run(range: 6)
        migrator = new(range)
      def self.run(args)
        migrator = new(args)

        QA::Runtime::Logger.info("Fetching Influx data for the last #{range} hours")
        migrator.migrate_data
      end

      # Fetch data from Influx DB, store as CSV and upload to GCS
      # @return [void]
      def migrate_data
        INFLUX_BUCKETS.each do |bucket|
          INFLUX_STATS_TYPE.each do |stats_type|
            if bucket == Support::InfluxdbTools::INFLUX_MAIN_TEST_METRICS_BUCKET && stats_type == "fabrication-stats"
              break
            end

            file_name = "#{bucket.end_with?('main') ? 'main' : 'all'}-#{stats_type}_#{Time.now.to_i}.csv"
            influx_to_csv(bucket, stats_type, file_name)

            # Upload to Google Cloud Storage
            upload_to_gcs(QA_GCS_BUCKET_NAME, file_name)
          end
        end
      end

      private

      # FluxQL query used to fetch data
      # @param [String] influx bucket to fetch data
      # @param [String] Type of data to fetch
      # @return [String] query string
      def query(influx_bucket, stats_type)
      #
      # @param [String] influx_bucket bucket to fetch data
      # @param [String] stats_type of data to fetch
      # @param [String] range for influxdb query
      # @return [void]
      def query(influx_bucket, stats_type, range)
        <<~QUERY
          from(bucket: "#{influx_bucket}")
            |> range(start: -#{@range}h)
            |> filter(fn: (r) => r["_measurement"] == "#{stats_type}")
            |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
            |> drop(columns: ["_start", "_stop", "result", "table", "_time", "_measurement"])
          from(bucket: "#{influx_bucket}")
            |> range(#{range})
            |> filter(fn: (r) => r["_measurement"] == "#{stats_type}")
            |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
        QUERY
      end

      # Query InfluxDB and store in CSV
      # @param [String] influx bucket to fetch data
      # @param [String] Type of data to fetch
      # @param [String] CSV filename to store data
      # @return void
      def influx_to_csv(influx_bucket, stats_type, data_file_name)
        all_runs = query_api.query(query: query(influx_bucket, stats_type))
        CSV.open(data_file_name, "wb", col_sep: '|') do |csv|
          stats_array = stats_type == "test-stats" ? TEST_STATS_FIELDS : FABRICATION_STATS_FIELDS
          all_runs.each do |table|
            table.records.each do |record|
              csv << stats_array.map { |key| record.values[key] }
            end
          end
          QA::Runtime::Logger.info("File #{data_file_name} contains #{all_runs.count} rows")
        end
      end

      # Fetch GCS Credentials
      # @return [Hash] GCS Credentials
      def gcs_credentials
        json_key = ENV["QA_GCS_JSON_KEY_FILE"] || raise(
          "QA_GCS_JSON_KEY_FILE env variable is required!"
        )
        return { google_json_key_location: json_key } if File.exist?(json_key)

        { google_json_key_string: json_key }
      end

      # Upload file to GCS
      # @param [String] bucket to be uploaded to
      # @param [String] path of file to be uploaded
      # return void
      def upload_to_gcs(bucket, backup_file_path)
        client = Fog::Storage::Google.new(google_project: QA_GCS_PROJECT_ID, **gcs_credentials)
        file_path = backup_file_path.tr('_0-9', '')

        # Backup older file
        begin
          QA::Runtime::Logger.info("Backing up older file to #{backup_file_path}")
          client.copy_object(bucket, file_path, bucket, backup_file_path)
        rescue Google::Apis::ClientError
          QA::Runtime::Logger.warn("File #{file_path} is not found in GCS bucket, continuing with upload...")
        end

        # Upload new file
        file = client.put_object(bucket, file_path, File.new(backup_file_path, "r"), force: true)
        QA::Runtime::Logger.info("File #{file_path} uploaded to gs://#{bucket}/#{file.name}")
      end
    end
  end
end
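The key change in the base class is that `query` now takes the Flux `range` clause as a string argument instead of hardcoding `start: -#{@range}h`, which lets the CSV and JSON subclasses below share one query builder. A small sketch of the two call shapes (values are illustrative only):

  # The two range styles the subclasses pass to `query`:
  csv_range  = "start: -6h"                                                       # rolling window of hours
  json_range = "start: 2024-05-12T00:00:00+00:00, stop: 2024-05-12T23:59:59+00:00" # one calendar day

  # Both interpolate into the same Flux query skeleton:
  #   from(bucket: "...")
  #     |> range(start: -6h)
  #     |> filter(fn: (r) => r["_measurement"] == "test-stats")
  #     |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")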
@@ -0,0 +1,83 @@
# frozen_string_literal: true

require 'csv'

module QA
  module Tools
    class MigrateInfluxDataToGcsCsv < MigrateInfluxDataToGcs
      TEST_STATS_FIELDS = %w[id testcase file_path name product_group stage job_id job_name
        job_url pipeline_id pipeline_url merge_request merge_request_iid smoke blocking quarantined
        retried retry_attempts run_time run_type status ui_fabrication api_fabrication total_fabrication].freeze
      FABRICATION_STATS_FIELDS = %w[timestamp resource fabrication_method http_method run_type
        merge_request fabrication_time info job_url].freeze

      def initialize(args)
        super

        @hours = args[:hours].to_i
      end

      # Fetch data from Influx DB, store as CSV and upload to GCS
      #
      # @return [void]
      def migrate_data
        INFLUX_BUCKETS.each do |bucket|
          INFLUX_STATS_TYPE.each do |stats_type|
            if bucket == Support::InfluxdbTools::INFLUX_MAIN_TEST_METRICS_BUCKET && stats_type == "fabrication-stats"
              break
            end

            file_name = "#{bucket.end_with?('main') ? 'main' : 'all'}-#{stats_type}_#{Time.now.to_i}.csv"
            influx_to_csv(bucket, stats_type, file_name)

            # Upload to Google Cloud Storage
            upload_to_gcs(QA_METRICS_GCS_BUCKET_NAME, file_name)
          end
        end
      end

      private

      # Query InfluxDB and store in CSV
      #
      # @param [String] influx_bucket bucket to fetch data
      # @param [String] stats_type of data to fetch
      # @param [String] data_file_name to store data
      # @return void
      def influx_to_csv(influx_bucket, stats_type, data_file_name)
        QA::Runtime::Logger.info("Fetching Influx data for the last #{@hours} hours")
        all_runs = query_api.query(query: query(influx_bucket, stats_type, "start: -#{@hours}h"))
        CSV.open(data_file_name, "wb", col_sep: '|') do |csv|
          stats_array = stats_type == "test-stats" ? TEST_STATS_FIELDS : FABRICATION_STATS_FIELDS
          all_runs.each do |table|
            table.records.each do |record|
              csv << stats_array.map { |key| record.values[key] }
            end
          end
          QA::Runtime::Logger.info("File #{data_file_name} contains #{all_runs.count} rows")
        end
      end

      # Upload file to GCS
      #
      # @param [String] bucket to be uploaded to
      # @param [String] backup_file_path of file to be uploaded
      # @return [void]
      def upload_to_gcs(bucket, backup_file_path)
        file_path = backup_file_path.tr('_0-9', '')

        # Backup older file
        begin
          QA::Runtime::Logger.info("Backing up older file to #{backup_file_path}")
          gcs_client.copy_object(bucket, file_path, bucket, backup_file_path)
        rescue Google::Apis::ClientError
          QA::Runtime::Logger.warn("File #{file_path} is not found in GCS bucket, continuing with upload...")
        end

        # Upload new file
        file = gcs_client.put_object(bucket, file_path, File.new(backup_file_path, "r"), force: true)
        QA::Runtime::Logger.info("File #{file_path} uploaded to gs://#{bucket}/#{file.name}")
      end
    end
  end
end
@@ -0,0 +1,194 @@
# frozen_string_literal: true

require 'date'
require 'active_support/core_ext/date'

module QA
  module Tools
    class MigrateInfluxDataToGcsJson < MigrateInfluxDataToGcs
      def initialize(args)
        super

        @year = args[:year]&.to_i
        @month = args[:month]&.to_i
        @day = args[:day]&.to_i

        raise("A year must be provided") unless @year
        raise("A month must be provided") unless @month
      end

      # Fetch data from Influx DB, store as JSON and upload to GCS
      #
      # @return [void]
      def migrate_data
        create_tmp_dir

        INFLUX_BUCKETS.each do |bucket|
          INFLUX_STATS_TYPE.each do |stats_type|
            if bucket == Support::InfluxdbTools::INFLUX_MAIN_TEST_METRICS_BUCKET && stats_type == "fabrication-stats"
              break
            end

            last_day_of_month = DateTime.new(@year, @month, 1).end_of_month.day

            days = @day ? [@day] : (1..last_day_of_month).to_a

            days.each do |day|
              date = DateTime.new(@year, @month, day)
              start_date = date.beginning_of_day.rfc3339
              end_date = date.end_of_day.rfc3339

              file_name = "#{bucket.end_with?('main') ? 'main' : 'all'}_#{stats_type}_#{date.strftime('%Y-%m-%d')}.json"
              file_path = File.join('tmp', file_name)
              influx_to_json(bucket, stats_type, file_path, "start: #{start_date}, stop: #{end_date}")

              # Upload to Google Cloud Storage
              upload_to_gcs(QA_METRICS_GCS_BUCKET_NAME, file_path, file_name)
            end
          end
        end
      end

      private

      # Query InfluxDB and store in JSON
      #
      # @param [String] influx_bucket bucket to fetch data
      # @param [String] stats_type of data to fetch
      # @param [String] data_file_name to store data
      # @param [String] range for influxdb query
      # @return [void]
      def influx_to_json(influx_bucket, stats_type, data_file_name, range)
        QA::Runtime::Logger.info("Fetching Influx data for stats: '#{stats_type}', " \
          "bucket: '#{influx_bucket}' in range #{range}...")
        all_runs = []

        retry_on_exception(sleep_interval: 30) do
          all_runs = query_api.query(query: query(influx_bucket, stats_type, range))
        end

        record_objects = []
        all_runs.each do |table|
          table.records.each do |record|
            record_objects << (
              if stats_type == 'test-stats'
                test_stats(stats_type, record)
              else
                fabrication_stats(stats_type, record)
              end
            )
          end
        end

        begin
          File.open(data_file_name, 'w') { |f| f.write(record_objects.to_json) }
          QA::Runtime::Logger.info("Wrote file #{data_file_name} containing #{all_runs.count} rows to disk")
        rescue StandardError => e
          QA::Runtime::Logger.error("Failed to write data to file: '#{data_file_name}', " \
            "influx_bucket: #{influx_bucket}, stats_type: #{stats_type}, error: #{e}")
        end
      end

      # Produces a test_stats Hash
      #
      # @param [String] stats_type of data
      # @param [String] record to get the data from
      # @return [Hash]
      def test_stats(stats_type, record)
        {
          name: stats_type,
          time: record.values['_time'],
          tags: tags(record.values),
          fields: fields(record.values)
        }
      end

      # Produces a fabrication_stats Hash
      #
      # @param [String] stats_type of data
      # @param [String] record to get the data from
      # @return [Hash]
      def fabrication_stats(stats_type, record)
        {
          name: stats_type,
          time: record.values['_time'],
          tags: {
            resource: record.values['resource'],
            fabrication_method: record.values['fabrication_method'],
            http_method: record.values['http_method'],
            run_type: record.values['run_type'],
            merge_request: record.values['merge_request']
          },
          fields: {
            fabrication_time: record.values['fabrication_time'],
            info: record.values['info']&.force_encoding('UTF-8'),
            job_url: record.values['job_url'],
            timestamp: record.values['timestamp']
          }
        }
      end

      # Produces a tags Hash
      #
      # @param [String] values record's values to get the data from
      # @return [Hash]
      def tags(values)
        tags = values.slice('name', 'file_path', 'status', 'smoke', 'blocking',
          'quarantined', 'job_name', 'merge_request', 'run_type', 'stage',
          'product_group', 'testcase', 'exception_class')

        # custom_test_metrics
        tags['import_repo'] = values['import_repo']
        tags['import_type'] = values['import_type']

        tags
      end

      # Produces a fields Hash
      #
      # @param [String] values record's values to get the data from
      # @return [Hash]
      def fields(values)
        fields = values.slice('id', 'run_time', 'api_fabrication', 'ui_fabrication',
          'total_fabrication', 'job_url', 'pipeline_url', 'pipeline_id',
          'job_id', 'merge_request_iid', 'failure_issue')

        fields['failure_exception'] = values['failure_exception']&.force_encoding('UTF-8')
        fields['import_time'] = values['import_time'] # custom_test_metrics

        fields
      end

      # Create a 'tmp' directory
      #
      # @return [String]
      def create_tmp_dir
        FileUtils.mkdir_p('tmp/')
      end

      # Upload file to GCS
      #
      # @param [String] bucket to be uploaded to
      # @param [String] file_path of file to be uploaded
      # @param [String] file_name of file to be uploaded
      # @return [void]
      def upload_to_gcs(bucket, file_path, file_name)
        retry_on_exception(sleep_interval: 30) do
          file = gcs_client.put_object(bucket, file_name, File.new(file_path, "r"), force: true)
          QA::Runtime::Logger.info("Uploaded file #{file_path} to #{gcs_url(bucket, file)}")
        end
      end

      # Construct the url of the uploaded file in GCS
      #
      # @param [String] bucket name the file is uploaded to
      # @param [String] file uploaded to gcs
      # @return [String]
      def gcs_url(bucket, file)
        "https://storage.cloud.google.com/#{bucket}/#{file.name}"
      end
    end
  end
end
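Each record the JSON migration emits mirrors the InfluxDB point structure: a measurement name, a timestamp, indexed `tags`, and unindexed `fields`. A minimal sketch of one serialized point (all values invented for illustration):

  require 'json'

  # Hypothetical example of one emitted point.
  point = {
    name: 'test-stats',
    time: '2024-05-12T10:15:00Z',
    tags: { status: 'passed', product_group: 'pipeline_execution' },
    fields: { run_time: 42, job_url: 'https://example.gitlab.com/job/1' }
  }

  puts point.to_json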
@@ -36,7 +36,9 @@ describe QA::Support::Formatters::TestMetricsFormatter do
  {
    bucket: 'e2e-test-stats',
    org: 'gitlab-qa',
    precision: InfluxDB2::WritePrecision::NANOSECOND
    precision: InfluxDB2::WritePrecision::NANOSECOND,
    read_timeout: 10,
    open_timeout: 10
  }
end

@@ -139,6 +141,7 @@ describe QA::Support::Formatters::TestMetricsFormatter do
  stub_env('QA_RUN_TYPE', run_type)
  stub_env('QA_EXPORT_TEST_METRICS', "true")
  stub_env('QA_RSPEC_RETRIED', "false")
  stub_env('QA_INFLUXDB_TIMEOUT', "10")
end

context 'with blocking spec' do

@@ -1,6 +1,11 @@
# frozen_string_literal: true

desc "Migrate the test results data from InfluxDB to GCS to visualise in Sisense/Tableau"
task :influx_to_gcs, [:range] do |_task, args|
  QA::Tools::MigrateInfluxDataToGcs.run(**args)
desc "Migrate the test results data from InfluxDB to GCS in csv format"
task :influx_to_gcs_csv, [:hours] do |_task, args|
  QA::Tools::MigrateInfluxDataToGcsCsv.run(**args)
end

desc "Migrate the test results data from InfluxDB to GCS in json format"
task :influx_to_gcs_json, [:year, :month, :day] do |_, args|
  QA::Tools::MigrateInfluxDataToGcsJson.run(**args)
end
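The old `influx_to_gcs` task is split into two entry points, one per subclass. A sketch of the dispatch the tasks perform, with argument values invented for illustration (rake task arguments arrive as strings, which the initializers convert with `to_i`):

  QA::Tools::MigrateInfluxDataToGcsCsv.run(hours: '6')                            # rolling 6-hour window, CSV
  QA::Tools::MigrateInfluxDataToGcsJson.run(year: '2024', month: '5', day: '12')  # one day, JSON
  QA::Tools::MigrateInfluxDataToGcsJson.run(year: '2024', month: '5')             # whole month, JSON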
|
|
|||
|
|
@@ -180,7 +180,7 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do

     it 'add related issue' do
       click_button 'Add a related issue'
-      fill_in 'Paste issue link', with: "#{issue_b.to_reference(project)} "
+      fill_in 'Enter issue URL', with: "#{issue_b.to_reference(project)} "
       page.within('.linked-issues-card-body') do
         click_button 'Add'
       end

@@ -199,7 +199,7 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do

     it 'add cross-project related issue' do
       click_button 'Add a related issue'
-      fill_in 'Paste issue link', with: "#{issue_project_b_a.to_reference(project)} "
+      fill_in 'Enter issue URL', with: "#{issue_project_b_a.to_reference(project)} "
       page.within('.linked-issues-card-body') do
         click_button 'Add'
       end

@@ -215,8 +215,8 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do

     it 'pressing enter should submit the form' do
       click_button 'Add a related issue'
-      fill_in 'Paste issue link', with: "#{issue_project_b_a.to_reference(project)} "
-      find_field('Paste issue link').native.send_key(:enter)
+      fill_in 'Enter issue URL', with: "#{issue_project_b_a.to_reference(project)} "
+      find_field('Enter issue URL').native.send_key(:enter)

       wait_for_requests

@@ -229,7 +229,7 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do

     it 'disallows duplicate entries' do
       click_button 'Add a related issue'
-      fill_in 'Paste issue link', with: 'duplicate duplicate duplicate'
+      fill_in 'Enter issue URL', with: 'duplicate duplicate duplicate'

       items = all('.issue-token')
       expect(items.count).to eq(1)

@@ -242,7 +242,7 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do
     it 'allows us to remove pending issues' do
       # Tests against https://gitlab.com/gitlab-org/gitlab/issues/11625
       click_button 'Add a related issue'
-      fill_in 'Paste issue link', with: 'issue1 issue2 issue3 '
+      fill_in 'Enter issue URL', with: 'issue1 issue2 issue3 '

       items = all('.issue-token')
       expect(items.count).to eq(3)

@@ -311,7 +311,7 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do

     it 'add related issue' do
       click_button 'Add a related issue'
-      fill_in 'Paste issue link', with: "##{issue_d.iid} "
+      fill_in 'Enter issue URL', with: "##{issue_d.iid} "
       page.within('.linked-issues-card-body') do
         click_button 'Add'
       end

@@ -329,7 +329,7 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do

     it 'add invalid related issue' do
       click_button 'Add a related issue'
-      fill_in 'Paste issue link', with: '#9999999 '
+      fill_in 'Enter issue URL', with: '#9999999 '
       page.within('.linked-issues-card-body') do
         click_button 'Add'
       end

@@ -346,7 +346,7 @@ RSpec.describe 'Related issues', :js, feature_category: :team_planning do

     it 'add unauthorized related issue' do
       click_button 'Add a related issue'
-      fill_in 'Paste issue link', with: "#{issue_project_unauthorized_a.to_reference(project)} "
+      fill_in 'Enter issue URL', with: "#{issue_project_unauthorized_a.to_reference(project)} "
      page.within('.linked-issues-card-body') do
         click_button 'Add'
       end
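These renames track a UI copy change: Capybara's `fill_in` and `find_field` locate inputs by label text, name, id, or placeholder, so when the visible placeholder changes, the locator string in every spec must change with it. A minimal sketch of the pattern:

```ruby
# Capybara matches 'Enter issue URL' against the field's label, name, id,
# or placeholder, so locator strings must track user-facing copy.
fill_in 'Enter issue URL', with: '#123 '
find_field('Enter issue URL').native.send_key(:enter)
```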
@@ -1,32 +1,46 @@
-import { GlDrawer, GlButton } from '@gitlab/ui';
+import { GlDrawer, GlButton, GlBadge, GlSprintf, GlIcon } from '@gitlab/ui';
 import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
 import RoleDetailsDrawer from '~/members/components/table/role_details_drawer.vue';
 import MembersTableCell from '~/members/components/table/members_table_cell.vue';
 import MemberAvatar from '~/members/components/table/member_avatar.vue';
-import { member as memberData } from '../../mock_data';
+import { member as memberData, memberWithCustomRole } from '../../mock_data';

 describe('Role details drawer', () => {
+  const { permissions } = memberWithCustomRole.customRoles[0];
   let wrapper;

-  const createWrapper = ({ member = memberData } = {}) => {
+  const createWrapper = ({ member } = {}) => {
     wrapper = shallowMountExtended(RoleDetailsDrawer, {
       propsData: { member },
       provide: { currentUserId: memberData.user.id, canManageMembers: false },
-      stubs: { MembersTableCell },
+      stubs: { MembersTableCell, GlSprintf },
     });
   };

   const findDrawer = () => wrapper.findComponent(GlDrawer);
+  const findCustomRoleBadge = () => wrapper.findComponent(GlBadge);
+  const findDescriptionHeader = () => wrapper.findByTestId('description-header');
+  const findDescriptionValue = () => wrapper.findByTestId('description-value');
+  const findBaseRole = () => wrapper.findByTestId('base-role');
+  const findPermissions = () => wrapper.findAllByTestId('permission');
+  const findPermissionAt = (index) => findPermissions().at(index);
+  const findPermissionNameAt = (index) => wrapper.findAllByTestId('permission-name').at(index);
+  const findPermissionDescriptionAt = (index) =>
+    wrapper.findAllByTestId('permission-description').at(index);

   it('does not show the drawer when there is no member selected', () => {
-    createWrapper({ member: null });
+    createWrapper();

     expect(findDrawer().exists()).toBe(false);
   });

-  describe('when there is a member', () => {
+  describe.each`
+    roleName         | member
+    ${'base role'}   | ${memberData}
+    ${'custom role'} | ${memberWithCustomRole}
+  `(`when there is a member (common tests for $roleName)`, ({ member }) => {
     beforeEach(() => {
-      createWrapper();
+      createWrapper({ member });
     });

     it('shows the drawer with expected props', () => {
@@ -34,25 +48,25 @@ describe('Role details drawer', () => {
     });

     it('shows the user avatar', () => {
-      expect(wrapper.findComponent(MembersTableCell).props('member')).toBe(memberData);
+      expect(wrapper.findComponent(MembersTableCell).props('member')).toBe(member);
       expect(wrapper.findComponent(MemberAvatar).props()).toMatchObject({
         memberType: 'user',
         isCurrentUser: true,
-        member: memberData,
+        member,
       });
     });

-    describe('Role name', () => {
+    describe('role name', () => {
       it('shows the header', () => {
         expect(wrapper.findByTestId('role-header').text()).toBe('Role');
       });

       it('shows the role name', () => {
-        expect(wrapper.findByTestId('role-value').text()).toBe('Owner');
+        expect(wrapper.findByTestId('role-value').text()).toContain('Owner');
       });
     });

-    describe('Permissions', () => {
+    describe('permissions', () => {
       it('shows the header', () => {
         expect(wrapper.findByTestId('permissions-header').text()).toBe('Permissions');
       });
@@ -70,4 +84,67 @@ describe('Role details drawer', () => {
       });
     });
   });
+
+  describe('when the member has a base role', () => {
+    beforeEach(() => {
+      createWrapper({ member: memberData });
+    });
+
+    it('does not show the custom role badge', () => {
+      expect(findCustomRoleBadge().exists()).toBe(false);
+    });
+
+    it('does not show the role description', () => {
+      expect(findDescriptionHeader().exists()).toBe(false);
+      expect(findDescriptionValue().exists()).toBe(false);
+    });
+
+    it('does not show the base role', () => {
+      expect(findBaseRole().exists()).toBe(false);
+    });
+
+    it('does not show any permissions', () => {
+      expect(findPermissions()).toHaveLength(0);
+    });
+  });
+
+  describe('when the member has a custom role', () => {
+    beforeEach(() => {
+      createWrapper({ member: memberWithCustomRole });
+    });
+
+    it('shows the custom role badge', () => {
+      expect(findCustomRoleBadge().props('size')).toBe('sm');
+      expect(findCustomRoleBadge().text()).toBe('Custom role');
+    });
+
+    it('shows the role description', () => {
+      expect(findDescriptionHeader().text()).toBe('Description');
+      expect(findDescriptionValue().text()).toBe('Custom role description');
+    });
+
+    it('shows the base role', () => {
+      expect(findBaseRole().text()).toMatchInterpolatedText('Base role: Owner');
+    });
+
+    it('shows the expected number of permissions', () => {
+      expect(findPermissions()).toHaveLength(2);
+    });
+
+    describe.each(permissions)(`for permission '$name'`, (permission) => {
+      const index = permissions.indexOf(permission);
+
+      it('shows the check icon', () => {
+        expect(findPermissionAt(index).findComponent(GlIcon).props('name')).toBe('check');
+      });
+
+      it('shows the permission name', () => {
+        expect(findPermissionNameAt(index).text()).toBe(`Permission ${index}`);
+      });
+
+      it('shows the permission description', () => {
+        expect(findPermissionDescriptionAt(index).text()).toBe(`Permission description ${index}`);
+      });
+    });
+  });
+});
@@ -54,6 +54,27 @@ export const member = {
   customRoles: [],
 };

+export const memberWithCustomRole = {
+  ...member,
+  ...{
+    accessLevel: {
+      ...member.accessLevel,
+      memberRoleId: 1,
+      description: 'Custom role description',
+    },
+    customRoles: [
+      {
+        memberRoleId: 1,
+        baseAccessLevel: 50,
+        permissions: [
+          { name: 'Permission 0', description: 'Permission description 0' },
+          { name: 'Permission 1', description: 'Permission description 1' },
+        ],
+      },
+    ],
+  },
+};
+
 export const group = {
   accessLevel: { integerValue: 10, stringValue: 'Guest' },
   sharedWithGroup: {
@@ -268,160 +268,133 @@ describe('buildClient', () => {
       expect(getQueryParam()).toContain('page_size=10');
     });

-    it('converts filter to proper query params', async () => {
-      await client.fetchTraces({
-        filters: {
-          attributes: {
-            durationMs: [
-              { operator: '>', value: '100' },
-              { operator: '<', value: '1000' },
-            ],
-            operation: [
-              { operator: '=', value: 'op' },
-              { operator: '!=', value: 'not-op' },
-            ],
-            service: [
-              { operator: '=', value: 'service' },
-              { operator: '!=', value: 'not-service' },
-            ],
-            period: [{ operator: '=', value: '5m' }],
-            status: [
-              { operator: '=', value: 'ok' },
-              { operator: '!=', value: 'error' },
-            ],
-            traceId: [
-              { operator: '=', value: 'trace-id' },
-              { operator: '!=', value: 'not-trace-id' },
-            ],
-            attribute: [{ operator: '=', value: 'name1=value1' }],
-          },
-        },
-      });
-      expect(getQueryParam()).toContain(
-        'gt[duration_nano]=100000000&lt[duration_nano]=1000000000' +
-          '&operation=op&not[operation]=not-op' +
-          '&service_name=service&not[service_name]=not-service' +
-          '&period=5m' +
-          '&trace_id=trace-id&not[trace_id]=not-trace-id' +
-          '&attr_name=name1&attr_value=value1' +
-          '&status=ok&not[status]=error',
-      );
-    });
-
-    it('handles custom date range period filter', async () => {
-      await client.fetchTraces({
-        filters: {
-          attributes: {
-            period: [{ operator: '=', value: '2023-01-01 - 2023-02-01' }],
-          },
-        },
-      });
-      expect(getQueryParam()).not.toContain('period=');
-      expect(getQueryParam()).toContain(
-        'start_time=2023-01-01T00:00:00.000Z&end_time=2023-02-01T00:00:00.000Z',
-      );
-    });
-
-    it.each([
-      'invalid - 2023-02-01',
-      '2023-02-01 - invalid',
-      'invalid - invalid',
-      '2023-01-01 / 2023-02-01',
-      '2023-01-01 2023-02-01',
-      '2023-01-01 - 2023-02-01 - 2023-02-01',
-    ])('ignore invalid values', async (val) => {
-      await client.fetchTraces({
-        filters: {
-          attributes: {
-            period: [{ operator: '=', value: val }],
-          },
-        },
-      });
-
-      expect(getQueryParam()).not.toContain('start_time=');
-      expect(getQueryParam()).not.toContain('end_time=');
-      expect(getQueryParam()).not.toContain('period=');
-      expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
-    });
-
-    it('handles repeated params', async () => {
-      await client.fetchTraces({
-        filters: {
-          attributes: {
-            operation: [
-              { operator: '=', value: 'op' },
-              { operator: '=', value: 'op2' },
-            ],
-          },
-        },
-      });
-      expect(getQueryParam()).toContain('operation=op&operation=op2');
-    });
-
-    it('ignores unsupported filters', async () => {
-      await client.fetchTraces({
-        filters: {
-          attributes: {
-            unsupportedFilter: [{ operator: '=', value: 'foo' }],
-          },
-        },
-      });
-
-      expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
-    });
-
-    it('ignores empty filters', async () => {
-      await client.fetchTraces({
-        filters: {
-          attributes: {
-            durationMs: null,
-            traceId: undefined,
-          },
-        },
-      });
-
-      expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
-    });
-
-    it('ignores non-array filters', async () => {
-      await client.fetchTraces({
-        filters: {
-          attributes: {
-            traceId: { operator: '=', value: 'foo' },
-          },
-        },
-      });
-
-      expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
-    });
-
-    it('ignores unsupported operators', async () => {
-      await client.fetchTraces({
-        filters: {
-          attributes: {
-            durationMs: [
-              { operator: '*', value: 'foo' },
-              { operator: '=', value: 'foo' },
-              { operator: '!=', value: 'foo' },
-            ],
-            operation: [
-              { operator: '>', value: 'foo' },
-              { operator: '<', value: 'foo' },
-            ],
-            service: [
-              { operator: '>', value: 'foo' },
-              { operator: '<', value: 'foo' },
-            ],
-            period: [{ operator: '!=', value: 'foo' }],
-            traceId: [
-              { operator: '>', value: 'foo' },
-              { operator: '<', value: 'foo' },
-            ],
-          },
-        },
-      });
-
-      expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
-    });
+    describe('date range filter', () => {
+      it('handle predefined date range value', async () => {
+        await client.fetchTraces({
+          filters: { dateRange: { value: '5m' } },
+        });
+        expect(getQueryParam()).toContain(`period=5m`);
+      });
+
+      it('handle custom date range value', async () => {
+        await client.fetchTraces({
+          filters: {
+            dateRange: {
+              endDate: new Date('2020-07-06'),
+              startDate: new Date('2020-07-05'),
+              value: 'custom',
+            },
+          },
+        });
+        expect(getQueryParam()).toContain(
+          'start_time=2020-07-05T00:00:00.000Z&end_time=2020-07-06T00:00:00.000Z',
+        );
+      });
+    });
+
+    describe('attributes filters', () => {
+      it('converts filter to proper query params', async () => {
+        await client.fetchTraces({
+          filters: {
+            attributes: {
+              durationMs: [
+                { operator: '>', value: '100' },
+                { operator: '<', value: '1000' },
+              ],
+              operation: [
+                { operator: '=', value: 'op' },
+                { operator: '!=', value: 'not-op' },
+              ],
+              service: [
+                { operator: '=', value: 'service' },
+                { operator: '!=', value: 'not-service' },
+              ],
+              status: [
+                { operator: '=', value: 'ok' },
+                { operator: '!=', value: 'error' },
+              ],
+              traceId: [
+                { operator: '=', value: 'trace-id' },
+                { operator: '!=', value: 'not-trace-id' },
+              ],
+              attribute: [{ operator: '=', value: 'name1=value1' }],
+            },
+          },
+        });
+        expect(getQueryParam()).toContain(
+          'gt[duration_nano]=100000000&lt[duration_nano]=1000000000' +
+            '&operation=op&not[operation]=not-op' +
+            '&service_name=service&not[service_name]=not-service' +
+            '&trace_id=trace-id&not[trace_id]=not-trace-id' +
+            '&attr_name=name1&attr_value=value1' +
+            '&status=ok&not[status]=error',
+        );
+      });
+
+      it('ignores unsupported filters', async () => {
+        await client.fetchTraces({
+          filters: {
+            attributes: {
+              unsupportedFilter: [{ operator: '=', value: 'foo' }],
+            },
+          },
+        });
+
+        expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
+      });
+
+      it('ignores empty filters', async () => {
+        await client.fetchTraces({
+          filters: {
+            attributes: {
+              durationMs: null,
+              traceId: undefined,
+            },
+          },
+        });
+
+        expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
+      });
+
+      it('ignores non-array filters', async () => {
+        await client.fetchTraces({
+          filters: {
+            attributes: {
+              traceId: { operator: '=', value: 'foo' },
+            },
+          },
+        });
+
+        expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
+      });
+
+      it('ignores unsupported operators', async () => {
+        await client.fetchTraces({
+          filters: {
+            attributes: {
+              durationMs: [
+                { operator: '*', value: 'foo' },
+                { operator: '=', value: 'foo' },
+                { operator: '!=', value: 'foo' },
+              ],
+              operation: [
+                { operator: '>', value: 'foo' },
+                { operator: '<', value: 'foo' },
+              ],
+              service: [
+                { operator: '>', value: 'foo' },
+                { operator: '<', value: 'foo' },
+              ],
+              traceId: [
+                { operator: '>', value: 'foo' },
+                { operator: '<', value: 'foo' },
+              ],
+            },
+          },
+        });
+
+        expect(getQueryParam()).toBe(`sort=${SORTING_OPTIONS.TIMESTAMP_DESC}`);
+      });
+    });
   });
@@ -488,157 +461,133 @@ describe('buildClient', () => {
       expect(getQueryParam()).toBe(``);
     });

-    it('converts filter to proper query params', async () => {
-      await client.fetchTracesAnalytics({
-        filters: {
-          attributes: {
-            durationMs: [
-              { operator: '>', value: '100' },
-              { operator: '<', value: '1000' },
-            ],
-            operation: [
-              { operator: '=', value: 'op' },
-              { operator: '!=', value: 'not-op' },
-            ],
-            service: [
-              { operator: '=', value: 'service' },
-              { operator: '!=', value: 'not-service' },
-            ],
-            period: [{ operator: '=', value: '5m' }],
-            status: [
-              { operator: '=', value: 'ok' },
-              { operator: '!=', value: 'error' },
-            ],
-            traceId: [
-              { operator: '=', value: 'trace-id' },
-              { operator: '!=', value: 'not-trace-id' },
-            ],
-            attribute: [{ operator: '=', value: 'name1=value1' }],
-          },
-        },
-      });
-      expect(getQueryParam()).toContain(
-        'gt[duration_nano]=100000000&lt[duration_nano]=1000000000' +
-          '&operation=op&not[operation]=not-op' +
-          '&service_name=service&not[service_name]=not-service' +
-          '&period=5m' +
-          '&trace_id=trace-id&not[trace_id]=not-trace-id' +
-          '&attr_name=name1&attr_value=value1' +
-          '&status=ok&not[status]=error',
-      );
-    });
-
-    it('handles custom date range period filter', async () => {
-      await client.fetchTracesAnalytics({
-        filters: {
-          attributes: {
-            period: [{ operator: '=', value: '2023-01-01 - 2023-02-01' }],
-          },
-        },
-      });
-      expect(getQueryParam()).not.toContain('period=');
-      expect(getQueryParam()).toContain(
-        'start_time=2023-01-01T00:00:00.000Z&end_time=2023-02-01T00:00:00.000Z',
-      );
-    });
-
-    it.each([
-      'invalid - 2023-02-01',
-      '2023-02-01 - invalid',
-      'invalid - invalid',
-      '2023-01-01 / 2023-02-01',
-      '2023-01-01 2023-02-01',
-      '2023-01-01 - 2023-02-01 - 2023-02-01',
-    ])('ignore invalid values', async (val) => {
-      await client.fetchTracesAnalytics({
-        filters: {
-          attributes: {
-            period: [{ operator: '=', value: val }],
-          },
-        },
-      });
-
-      expect(getQueryParam()).not.toContain('start_time=');
-      expect(getQueryParam()).not.toContain('end_time=');
-      expect(getQueryParam()).not.toContain('period=');
-      expect(getQueryParam()).toBe(``);
-    });
-
-    it('handles repeated params', async () => {
-      await client.fetchTracesAnalytics({
-        filters: {
-          attributes: {
-            operation: [
-              { operator: '=', value: 'op' },
-              { operator: '=', value: 'op2' },
-            ],
-          },
-        },
-      });
-      expect(getQueryParam()).toContain('operation=op&operation=op2');
-    });
-
-    it('ignores unsupported filters', async () => {
-      await client.fetchTracesAnalytics({
-        filters: {
-          attributes: {
-            unsupportedFilter: [{ operator: '=', value: 'foo' }],
-          },
-        },
-      });
-
-      expect(getQueryParam()).toBe(``);
-    });
-
-    it('ignores empty filters', async () => {
-      await client.fetchTracesAnalytics({
-        filters: {
-          attributes: {
-            durationMs: null,
-            traceId: undefined,
-          },
-        },
-      });
-
-      expect(getQueryParam()).toBe(``);
-    });
-
-    it('ignores non-array filters', async () => {
-      await client.fetchTracesAnalytics({
-        filters: {
-          attributes: {
-            traceId: { operator: '=', value: 'foo' },
-          },
-        },
-      });
-
-      expect(getQueryParam()).toBe(``);
-    });
-
-    it('ignores unsupported operators', async () => {
-      await client.fetchTracesAnalytics({
-        filters: {
-          attributes: {
-            durationMs: [
-              { operator: '*', value: 'foo' },
-              { operator: '=', value: 'foo' },
-              { operator: '!=', value: 'foo' },
-            ],
-            operation: [
-              { operator: '>', value: 'foo' },
-              { operator: '<', value: 'foo' },
-            ],
-            service: [
-              { operator: '>', value: 'foo' },
-              { operator: '<', value: 'foo' },
-            ],
-            period: [{ operator: '!=', value: 'foo' }],
-            traceId: [
-              { operator: '>', value: 'foo' },
-              { operator: '<', value: 'foo' },
-            ],
-          },
-        },
-      });
-
-      expect(getQueryParam()).toBe(``);
-    });
+    describe('date range filter', () => {
+      it('handle predefined date range value', async () => {
+        await client.fetchTracesAnalytics({
+          filters: { dateRange: { value: '5m' } },
+        });
+        expect(getQueryParam()).toContain(`period=5m`);
+      });
+
+      it('handle custom date range value', async () => {
+        await client.fetchTracesAnalytics({
+          filters: {
+            dateRange: {
+              endDate: new Date('2020-07-06'),
+              startDate: new Date('2020-07-05'),
+              value: 'custom',
+            },
+          },
+        });
+        expect(getQueryParam()).toContain(
+          'start_time=2020-07-05T00:00:00.000Z&end_time=2020-07-06T00:00:00.000Z',
+        );
+      });
+    });
+
+    describe('attributes filters', () => {
+      it('converts filter to proper query params', async () => {
+        await client.fetchTracesAnalytics({
+          filters: {
+            attributes: {
+              durationMs: [
+                { operator: '>', value: '100' },
+                { operator: '<', value: '1000' },
+              ],
+              operation: [
+                { operator: '=', value: 'op' },
+                { operator: '!=', value: 'not-op' },
+              ],
+              service: [
+                { operator: '=', value: 'service' },
+                { operator: '!=', value: 'not-service' },
+              ],
+              status: [
+                { operator: '=', value: 'ok' },
+                { operator: '!=', value: 'error' },
+              ],
+              traceId: [
+                { operator: '=', value: 'trace-id' },
+                { operator: '!=', value: 'not-trace-id' },
+              ],
+              attribute: [{ operator: '=', value: 'name1=value1' }],
+            },
+          },
+        });
+        expect(getQueryParam()).toContain(
+          'gt[duration_nano]=100000000&lt[duration_nano]=1000000000' +
+            '&operation=op&not[operation]=not-op' +
+            '&service_name=service&not[service_name]=not-service' +
+            '&trace_id=trace-id&not[trace_id]=not-trace-id' +
+            '&attr_name=name1&attr_value=value1' +
+            '&status=ok&not[status]=error',
+        );
+      });
+
+      it('ignores unsupported filters', async () => {
+        await client.fetchTracesAnalytics({
+          filters: {
+            attributes: {
+              unsupportedFilter: [{ operator: '=', value: 'foo' }],
+            },
+          },
+        });
+
+        expect(getQueryParam()).toBe(``);
+      });
+
+      it('ignores empty filters', async () => {
+        await client.fetchTracesAnalytics({
+          filters: {
+            attributes: {
+              durationMs: null,
+              traceId: undefined,
+            },
+          },
+        });
+
+        expect(getQueryParam()).toBe(``);
+      });
+
+      it('ignores non-array filters', async () => {
+        await client.fetchTracesAnalytics({
+          filters: {
+            attributes: {
+              traceId: { operator: '=', value: 'foo' },
+            },
+          },
+        });
+
+        expect(getQueryParam()).toBe(``);
+      });
+
+      it('ignores unsupported operators', async () => {
+        await client.fetchTracesAnalytics({
+          filters: {
+            attributes: {
+              durationMs: [
+                { operator: '*', value: 'foo' },
+                { operator: '=', value: 'foo' },
+                { operator: '!=', value: 'foo' },
+              ],
+              operation: [
+                { operator: '>', value: 'foo' },
+                { operator: '<', value: 'foo' },
+              ],
+              service: [
+                { operator: '>', value: 'foo' },
+                { operator: '<', value: 'foo' },
+              ],
+              traceId: [
+                { operator: '>', value: 'foo' },
+                { operator: '<', value: 'foo' },
+              ],
+            },
+          },
+        });
+
+        expect(getQueryParam()).toBe(``);
+      });
+    });
   });
@@ -34,7 +34,7 @@ describe('RelatedIssuableInput', () => {
       mountComponent();

       expect(wrapper.findComponent({ ref: 'input' }).element.placeholder).toBe(
-        'Paste issue link or <#issue id>',
+        'Enter issue URL or <#issue ID>',
       );
     });
