Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-10-18 12:10:46 +00:00
parent 990891feb7
commit 91ca0550e0
81 changed files with 1185 additions and 238 deletions

View File

@ -1061,8 +1061,6 @@ Layout/ArgumentAlignment:
- 'ee/spec/lib/gitlab/license_scanning/package_licenses_spec.rb'
- 'ee/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb'
- 'ee/spec/lib/gitlab/status_page_spec.rb'
- 'ee/spec/lib/gitlab/usage/metrics/instrumentations/count_approval_project_rules_metric_spec.rb'
- 'ee/spec/lib/gitlab/usage/metrics/instrumentations/count_ci_environments_approval_required_spec.rb'
- 'ee/spec/lib/gitlab/zoekt/search_results_spec.rb'
- 'ee/spec/lib/incident_management/oncall_shift_generator_spec.rb'
- 'ee/spec/lib/omni_auth/strategies/group_saml_spec.rb'
@ -1590,14 +1588,6 @@ Layout/ArgumentAlignment:
- 'spec/lib/gitlab/suggestions/file_suggestion_spec.rb'
- 'spec/lib/gitlab/suggestions/suggestion_set_spec.rb'
- 'spec/lib/gitlab/tracking/destinations/snowplow_spec.rb'
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb'
- 'spec/lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric_spec.rb'
- 'spec/lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric_spec.rb'
- 'spec/lib/gitlab/usage/metrics/instrumentations/incoming_email_encrypted_secrets_enabled_metric_spec.rb'
- 'spec/lib/gitlab/usage/metrics/instrumentations/service_desk_email_encrypted_secrets_enabled_metric_spec.rb'
- 'spec/lib/gitlab/usage/metrics/query_spec.rb'
- 'spec/lib/gitlab/usage_data_queries_spec.rb'
- 'spec/lib/gitlab/usage_data_spec.rb'
- 'spec/lib/gitlab/utils/lazy_attributes_spec.rb'
- 'spec/lib/gitlab/workhorse_spec.rb'
- 'spec/lib/google_api/cloud_platform/client_spec.rb'

View File

@ -104,7 +104,7 @@ gem 'akismet', '~> 3.0' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'invisible_captcha', '~> 2.1.0' # rubocop:todo Gemfile/MissingFeatureCategory
# Two-factor authentication
gem 'devise-two-factor', '~> 4.0.2' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'devise-two-factor', '~> 4.1.1' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'rqrcode', '~> 2.0', feature_category: :system_access
gem 'attr_encrypted', '~> 3.2.4', path: 'vendor/gems/attr_encrypted' # rubocop:todo Gemfile/MissingFeatureCategory
@ -139,8 +139,8 @@ gem 'grape-path-helpers', '~> 1.7.1', feature_category: :api
gem 'rack-cors', '~> 2.0.1', require: 'rack/cors' # rubocop:todo Gemfile/MissingFeatureCategory
# GraphQL API
gem 'graphql', '~> 1.13.19', feature_category: :api
gem 'graphql-docs', '~> 2.1.0', group: [:development, :test], feature_category: :api
gem 'graphql', '~> 2.0.27', feature_category: :api
gem 'graphql-docs', '~> 4.0.0', group: [:development, :test], feature_category: :api
gem 'graphiql-rails', '~> 1.8.0', feature_category: :api
gem 'apollo_upload_server', '~> 2.1.0', feature_category: :api
gem 'graphlient', '~> 0.5.0', feature_category: :importers # Used by BulkImport feature (group::import)

View File

@ -97,6 +97,7 @@
{"name":"cvss-suite","version":"3.0.1","platform":"ruby","checksum":"b5ca9e9e94032a42fd0dc28c1e305378b62c949e35ed7111fc4a1d76f68ad3f9"},
{"name":"danger","version":"9.3.1","platform":"ruby","checksum":"9070fbac181eb45fb9b69ea25e6ea4faa86796ef33bf8d00346cab4385e51df5"},
{"name":"danger-gitlab","version":"8.0.0","platform":"ruby","checksum":"497dd7d0f6513913de651019223d8058cf494df10acbd17de92b175dfa04a3a8"},
{"name":"dartsass","version":"1.49.8","platform":"ruby","checksum":"267e7262a5655c8f0baa1ef663e976252bdbfa8bbf40c175153544a2dc8e1345"},
{"name":"database_cleaner","version":"1.7.0","platform":"ruby","checksum":"bdf833c197afac7054015bcde2567c3834c366bbfe6a377c30151ca984b32016"},
{"name":"date","version":"3.3.3","platform":"java","checksum":"584e0a582d1eb2207b4eaac089d8a43f2ca10bea02682f286099642f15c56cce"},
{"name":"date","version":"3.3.3","platform":"ruby","checksum":"819792019d5712b748fb15f6dfaaedef14b0328723ef23583ea35f186774530f"},
@ -113,7 +114,7 @@
{"name":"devfile","version":"0.0.23.pre.alpha1","platform":"x86_64-linux","checksum":"30e31b39599b7823673f5386f8bf19b7cb2b959c7f34a16704893db437d42094"},
{"name":"device_detector","version":"1.0.0","platform":"ruby","checksum":"b800fb3150b00c23e87b6768011808ac1771fffaae74c3238ebaf2b782947a7d"},
{"name":"devise","version":"4.8.1","platform":"ruby","checksum":"fdd48bbe79a89e7c1152236a70479842ede48bea4fa7f4f2d8da1f872559803e"},
{"name":"devise-two-factor","version":"4.0.2","platform":"ruby","checksum":"6548d2696ed090d27046f888f4fa7380f151e0f823902d46fd9b91e7d0cac511"},
{"name":"devise-two-factor","version":"4.1.1","platform":"ruby","checksum":"c95f5b07533e62217aaed3c386874d94e2d472fb5f2b6598afe8600fc17a8b95"},
{"name":"diff-lcs","version":"1.5.0","platform":"ruby","checksum":"49b934001c8c6aedb37ba19daec5c634da27b318a7a3c654ae979d6ba1929b67"},
{"name":"diff_match_patch","version":"0.1.0","platform":"ruby","checksum":"b36057bfcfeaedf19dcb7b2c28c19ee625bd6ec6d0d182717d3ef22b3879c40e"},
{"name":"diffy","version":"3.4.2","platform":"ruby","checksum":"36b42ffbe5138ddc56182107c24ad8d6b066ecfd2876829f391e3a4993d89ae1"},
@ -144,14 +145,14 @@
{"name":"email_spec","version":"2.2.0","platform":"ruby","checksum":"60b7980580a835e7f676db60667f17a2d60e8e0e39c26d81cfc231805c544d79"},
{"name":"encryptor","version":"3.0.0","platform":"ruby","checksum":"abf23f94ab4d864b8cea85b43f3432044a60001982cda7c33c1cd90da8db1969"},
{"name":"erubi","version":"1.12.0","platform":"ruby","checksum":"27bedb74dfb1e04ff60674975e182d8ca787f2224f2e8143268c7696f42e4723"},
{"name":"escape_utils","version":"1.2.1","platform":"ruby","checksum":"e5292fe8d7e12a9bcb4502d99e28fb602e4e1514690d98a1c4957f6f77b4b162"},
{"name":"escape_utils","version":"1.3.0","platform":"ruby","checksum":"dffb7010922880ace6ceed642156c64e2a64620f27e0849f43bc4f68fd3c2c09"},
{"name":"et-orbi","version":"1.2.7","platform":"ruby","checksum":"3b693d47f94a4060ccc07e60adda488759b1e8b9228a633ebbad842dfc245fb4"},
{"name":"ethon","version":"0.16.0","platform":"ruby","checksum":"bba0da1cea8ac3e1f5cdd7cb1cb5fc78d7ac562c33736f18f0c3eb2b63053d9e"},
{"name":"excon","version":"0.99.0","platform":"ruby","checksum":"09f0de591b5bd1c642680aa1340538a16b90c1111694b46f61f6d8bfdd340249"},
{"name":"execjs","version":"2.8.1","platform":"ruby","checksum":"6d939919cfd81bcc4d6556f322c3995a70cfe4289ea0bd3b1f999b489c323088"},
{"name":"expgen","version":"0.1.1","platform":"ruby","checksum":"4e6a0f65b210a201d6045debb3e62a24e33251a49f81a11b067d303a60d3a239"},
{"name":"expression_parser","version":"0.9.0","platform":"ruby","checksum":"2b56db3cffc48c3337f4f29f5bc2374c86e7ba29acb40269c74bb55af9f868a4"},
{"name":"extended-markdown-filter","version":"0.6.0","platform":"ruby","checksum":"46844b5740b1703a0e0674e31a17c83d1244a3198abb3aae51cad1eb152eb19e"},
{"name":"extended-markdown-filter","version":"0.7.0","platform":"ruby","checksum":"c8eeef7409fbae18c6b407cd3e4eeb5d25c35cb08fe1ac06f375df3db2d4f138"},
{"name":"factory_bot","version":"6.2.0","platform":"ruby","checksum":"d181902cdda531cf6cef036001b3a700a7b5e04bac63976864530120b2ac7d13"},
{"name":"factory_bot_rails","version":"6.2.0","platform":"ruby","checksum":"278b969666b078e76e1c972c501da9b1fac15e5b0ff328cc7ce400366164d0a1"},
{"name":"faraday","version":"1.10.0","platform":"ruby","checksum":"a42158d5c1932c16fd483c512f7e0797b4916096bcf0eb5fb927a1c915a7ea02"},
@ -263,9 +264,9 @@
{"name":"graphiql-rails","version":"1.8.0","platform":"ruby","checksum":"02e2c5098be2c6c29219a0e9b2910a2cd3c494301587a3199a7c4484d8038ed1"},
{"name":"graphlient","version":"0.5.0","platform":"ruby","checksum":"0f2c9416142e50b6bd4edcd86fe6810f792951732c487f9061aee6d420e0f292"},
{"name":"graphlyte","version":"1.0.0","platform":"ruby","checksum":"b5af4ab67dde6e961f00ea1c18f159f73b52ed11395bb4ece297fe628fa1804d"},
{"name":"graphql","version":"1.13.19","platform":"ruby","checksum":"43581db30e21f781d3c175e85807071dc0ba94304d59621b44116f817a5f5a5a"},
{"name":"graphql-client","version":"0.17.0","platform":"ruby","checksum":"5aaf02ce8f2dbc8e3ba05a7eaeb3ad9336762c4424c6093f4438fbb9490eeb5d"},
{"name":"graphql-docs","version":"2.1.0","platform":"ruby","checksum":"7eb82402f8fda455104b2b60364e9ada145d79d3121a8f915790d49da38bb576"},
{"name":"graphql","version":"2.0.27","platform":"ruby","checksum":"1f59be5a770248595971a261c96edef3adcf323e93387e53d1ca1ffd16448b36"},
{"name":"graphql-client","version":"0.18.0","platform":"ruby","checksum":"98aadc810f23dce5404621903945aa584279574f87855b4301d69c90ddc6250b"},
{"name":"graphql-docs","version":"4.0.0","platform":"ruby","checksum":"f68296959263db26e1b7ba7058856d67b641cf508187222268be58f09dfa02d7"},
{"name":"grpc","version":"1.58.0","platform":"ruby","checksum":"0cfbccbe955d318909c58d3e207969a8145fc837e7edd2f3992a9d40b13e9d02"},
{"name":"grpc","version":"1.58.0","platform":"x64-mingw-ucrt","checksum":"50d733ee66940c8ed952c85892090becbd1a11791d76218d93c16c5b4714c026"},
{"name":"grpc","version":"1.58.0","platform":"x64-mingw32","checksum":"1118e9d977c40d77529d2f0ecad51ca0346bfee30893f0eb96f35a2de674faeb"},
@ -521,7 +522,7 @@
{"name":"reverse_markdown","version":"1.4.0","platform":"ruby","checksum":"a3305da1509ac8388fa84a28745621113e121383402a2e8e9350ba649034e870"},
{"name":"rexml","version":"3.2.6","platform":"ruby","checksum":"e0669a2d4e9f109951cb1fde723d8acd285425d81594a2ea929304af50282816"},
{"name":"rinku","version":"2.0.0","platform":"ruby","checksum":"3e695aaf9f24baba3af45823b5c427b58a624582132f18482320e2737f9f8a85"},
{"name":"rotp","version":"6.2.0","platform":"ruby","checksum":"239a2eefba6f1bd4157b2c735d0f975598e0ef94823eea2f35d103d2e5cc0787"},
{"name":"rotp","version":"6.3.0","platform":"ruby","checksum":"75d40087e65ed0d8022c33055a6306c1c400d1c12261932533b5d6cbcd868854"},
{"name":"rouge","version":"4.1.3","platform":"ruby","checksum":"9c8663db26e05e52b3b0286daacae73ebb361c1bd31d7febd8c57087faa0b9a5"},
{"name":"rqrcode","version":"2.2.0","platform":"ruby","checksum":"23eea88bb44c7ee6d6cab9354d08c287f7ebcdc6112e1fe7bcc2d010d1ffefc1"},
{"name":"rqrcode_core","version":"1.2.0","platform":"ruby","checksum":"cf4989dc82d24e2877984738c4ee569308625fed2a810960f1b02d68d0308d1a"},
@ -560,8 +561,6 @@
{"name":"safe_yaml","version":"1.0.4","platform":"ruby","checksum":"248193992ef1730a0c9ec579999ef2256a2b3a32a9bd9d708a1e12544a489ec2"},
{"name":"safety_net_attestation","version":"0.4.0","platform":"ruby","checksum":"96be2d74e7ed26453a51894913449bea0e072f44490021545ac2d1c38b0718ce"},
{"name":"sanitize","version":"6.0.2","platform":"ruby","checksum":"48c4eb8e92bb1699056b6000986ac50fc9df82f458a941abf2c4d6759bccd5cf"},
{"name":"sass","version":"3.5.5","platform":"ruby","checksum":"1bb5431bc620ce29076728a4c8f7b4acb55066ed9df8cf5d57db6cda450d8080"},
{"name":"sass-listen","version":"4.0.0","platform":"ruby","checksum":"ae9dcb76dd3e234329e5ba6e213f48e532c5a3e7b0b4d8a87f13aaca0cc18377"},
{"name":"sassc","version":"2.4.0","platform":"ruby","checksum":"4c60a2b0a3b36685c83b80d5789401c2f678c1652e3288315a1551d811d9f83e"},
{"name":"sassc","version":"2.4.0","platform":"x64-mingw32","checksum":"8773b917cb52c7e92c94d4bf324c1c0be3e50d9092f9f5ed4c3c6e454b451c5e"},
{"name":"sassc-rails","version":"2.1.0","platform":"ruby","checksum":"764dcc74e06930e3483caf0d595084d11f2b0fefd6539abf487cdddfba6cafa2"},

View File

@ -410,6 +410,7 @@ GEM
danger-gitlab (8.0.0)
danger
gitlab (~> 4.2, >= 4.2.0)
dartsass (1.49.8)
database_cleaner (1.7.0)
date (3.3.3)
dead_end (3.1.1)
@ -442,11 +443,11 @@ GEM
railties (>= 4.1.0)
responders
warden (~> 1.2.3)
devise-two-factor (4.0.2)
activesupport (< 7.1)
attr_encrypted (>= 1.3, < 4, != 2)
devise-two-factor (4.1.1)
activesupport (~> 7.0)
attr_encrypted (>= 1.3, < 5, != 2)
devise (~> 4.0)
railties (< 7.1)
railties (~> 7.0)
rotp (~> 6.0)
diff-lcs (1.5.0)
diff_match_patch (0.1.0)
@ -505,7 +506,7 @@ GEM
mail (~> 2.7)
encryptor (3.0.0)
erubi (1.12.0)
escape_utils (1.2.1)
escape_utils (1.3.0)
et-orbi (1.2.7)
tzinfo
ethon (0.16.0)
@ -515,8 +516,8 @@ GEM
expgen (0.1.1)
parslet
expression_parser (0.9.0)
extended-markdown-filter (0.6.0)
html-pipeline (~> 2.0)
extended-markdown-filter (0.7.0)
html-pipeline (~> 2.9)
factory_bot (6.2.0)
activesupport (>= 5.0.0)
factory_bot_rails (6.2.0)
@ -804,18 +805,18 @@ GEM
faraday_middleware
graphql-client
graphlyte (1.0.0)
graphql (1.13.19)
graphql-client (0.17.0)
graphql (2.0.27)
graphql-client (0.18.0)
activesupport (>= 3.0)
graphql (~> 1.10)
graphql-docs (2.1.0)
commonmarker (~> 0.16)
graphql
graphql-docs (4.0.0)
commonmarker (~> 0.23, >= 0.23.6)
dartsass (~> 1.49)
escape_utils (~> 1.2)
extended-markdown-filter (~> 0.4)
gemoji (~> 3.0)
graphql (~> 1.12)
html-pipeline (~> 2.9)
sass (~> 3.4)
graphql (~> 2.0)
html-pipeline (~> 2.14, >= 2.14.3)
grpc (1.58.0)
google-protobuf (~> 3.23)
googleapis-common-protos-types (~> 1.0)
@ -1336,7 +1337,7 @@ GEM
nokogiri
rexml (3.2.6)
rinku (2.0.0)
rotp (6.2.0)
rotp (6.3.0)
rouge (4.1.3)
rqrcode (2.2.0)
chunky_png (~> 1.0)
@ -1440,11 +1441,6 @@ GEM
sanitize (6.0.2)
crass (~> 1.0.2)
nokogiri (>= 1.12.0)
sass (3.5.5)
sass-listen (~> 4.0.0)
sass-listen (4.0.0)
rb-fsevent (~> 0.9, >= 0.9.4)
rb-inotify (~> 0.9, >= 0.9.7)
sassc (2.4.0)
ffi (~> 1.9)
sassc-rails (2.1.0)
@ -1784,7 +1780,7 @@ DEPENDENCIES
device_detector
devise (~> 4.8.1)
devise-pbkdf2-encryptable (~> 0.0.0)!
devise-two-factor (~> 4.0.2)
devise-two-factor (~> 4.1.1)
diff_match_patch (~> 0.1.0)
diffy (~> 3.4)
discordrb-webhooks (~> 3.4)
@ -1859,8 +1855,8 @@ DEPENDENCIES
graphiql-rails (~> 1.8.0)
graphlient (~> 0.5.0)
graphlyte (~> 1.0.0)
graphql (~> 1.13.19)
graphql-docs (~> 2.1.0)
graphql (~> 2.0.27)
graphql-docs (~> 4.0.0)
grpc (~> 1.58.0)
gssapi (~> 1.3.1)
guard-rspec

View File

@ -2,6 +2,9 @@
// eslint-disable-next-line no-restricted-imports
import { mapActions, mapState } from 'vuex';
import Tracking from '~/tracking';
import setSelectedBoardItemsMutation from '~/boards/graphql/client/set_selected_board_items.mutation.graphql';
import unsetSelectedBoardItemsMutation from '~/boards/graphql/client/unset_selected_board_items.mutation.graphql';
import selectedBoardItemsQuery from '~/boards/graphql/client/selected_board_items.query.graphql';
import setActiveBoardItemMutation from 'ee_else_ce/boards/graphql/client/set_active_board_item.mutation.graphql';
import activeBoardItemQuery from 'ee_else_ce/boards/graphql/client/active_board_item.query.graphql';
import BoardCardInner from './board_card_inner.vue';
@ -52,9 +55,12 @@ export default {
return !this.isApolloBoard;
},
},
selectedBoardItems: {
query: selectedBoardItemsQuery,
},
},
computed: {
...mapState(['selectedBoardItems', 'activeId']),
...mapState(['activeId']),
activeItemId() {
return this.isApolloBoard ? this.activeBoardItem?.id : this.activeId;
},
@ -62,10 +68,7 @@ export default {
return this.item.id === this.activeItemId;
},
multiSelectVisible() {
return (
!this.activeItemId &&
this.selectedBoardItems.findIndex((boardItem) => boardItem.id === this.item.id) > -1
);
return !this.activeItemId && this.selectedBoardItems?.includes(this.item.id);
},
isDisabled() {
return this.disabled || !this.item.id || this.item.isLoading || !this.canAdmin;
@ -93,7 +96,7 @@ export default {
},
},
methods: {
...mapActions(['toggleBoardItemMultiSelection', 'toggleBoardItem']),
...mapActions(['toggleBoardItem']),
toggleIssue(e) {
// Don't do anything if this happened on a no trigger element
if (e.target.closest('.js-no-trigger')) return;
@ -110,7 +113,10 @@ export default {
this.track('click_card', { label: 'right_sidebar' });
}
},
toggleItem() {
async toggleItem() {
await this.$apollo.mutate({
mutation: unsetSelectedBoardItemsMutation,
});
this.$apollo.mutate({
mutation: setActiveBoardItemMutation,
variables: {
@ -119,6 +125,26 @@ export default {
},
});
},
async toggleBoardItemMultiSelection(item) {
if (this.activeItemId) {
await this.$apollo.mutate({
mutation: setSelectedBoardItemsMutation,
variables: {
itemId: this.activeItemId,
},
});
await this.$apollo.mutate({
mutation: setActiveBoardItemMutation,
variables: { boardItem: null },
});
}
this.$apollo.mutate({
mutation: setSelectedBoardItemsMutation,
variables: {
itemId: item.id,
},
});
},
},
};
</script>

View File

@ -0,0 +1,3 @@
query selectedBoardItems {
selectedBoardItems @client
}

View File

@ -0,0 +1,3 @@
mutation setSelectedBoardItems($itemId: ID!) {
setSelectedBoardItems(itemId: $itemId) @client
}

View File

@ -0,0 +1,3 @@
mutation unsetSelectedBoardItems {
unsetSelectedBoardItems @client
}

View File

@ -3,6 +3,7 @@ import VueApollo from 'vue-apollo';
import { defaultDataIdFromObject } from '@apollo/client/core';
import { concatPagination } from '@apollo/client/utilities';
import errorQuery from '~/boards/graphql/client/error.query.graphql';
import selectedBoardItemsQuery from '~/boards/graphql/client/selected_board_items.query.graphql';
import isShowingLabelsQuery from '~/graphql_shared/client/is_showing_labels.query.graphql';
import getIssueStateQuery from '~/issues/show/queries/get_issue_state.query.graphql';
import createDefaultClient from '~/lib/graphql';
@ -28,6 +29,11 @@ export const config = {
return currentState ?? true;
},
},
selectedBoardItems: {
read(currentState) {
return currentState ?? [];
},
},
},
},
Project: {
@ -218,6 +224,11 @@ export const config = {
return currentState ?? true;
},
},
selectedBoardItems: {
read(currentState) {
return currentState ?? [];
},
},
},
},
}
@ -242,6 +253,21 @@ export const resolvers = {
});
return boardItem;
},
setSelectedBoardItems(_, { itemId }, { cache }) {
const sourceData = cache.readQuery({ query: selectedBoardItemsQuery });
cache.writeQuery({
query: selectedBoardItemsQuery,
data: { selectedBoardItems: [...sourceData.selectedBoardItems, itemId] },
});
return [...sourceData.selectedBoardItems, itemId];
},
unsetSelectedBoardItems(_, _variables, { cache }) {
cache.writeQuery({
query: selectedBoardItemsQuery,
data: { selectedBoardItems: [] },
});
return [];
},
setError(_, { error }, { cache }) {
cache.writeQuery({
query: errorQuery,

View File

@ -262,6 +262,7 @@ export default {
{{ __('No matches found') }}
</gl-dropdown-text>
<gl-dropdown-text v-else-if="hasFetched">{{ __('No suggestions found') }}</gl-dropdown-text>
<slot name="footer"></slot>
</template>
</gl-filtered-search-token>
</template>

View File

@ -22,8 +22,12 @@ export default {
WorkItemCommentForm,
},
mixins: [Tracking.mixin()],
inject: ['fullPath', 'isGroup'],
inject: ['isGroup'],
props: {
fullPath: {
type: String,
required: true,
},
workItemId: {
type: String,
required: true,

View File

@ -35,7 +35,6 @@ export default {
GlTooltip: GlTooltipDirective,
},
mixins: [Tracking.mixin()],
inject: ['fullPath'],
props: {
workItemId: {
type: String,

View File

@ -18,8 +18,11 @@ export default {
DiscussionNotesRepliesWrapper,
WorkItemNoteReplying,
},
inject: ['fullPath'],
props: {
fullPath: {
type: String,
required: true,
},
workItemId: {
type: String,
required: true,
@ -154,6 +157,7 @@ export default {
:is-first-note="true"
:note="note"
:discussion-id="discussionId"
:full-path="fullPath"
:has-replies="hasReplies"
:work-item-type="workItemType"
:is-modal="isModal"
@ -180,6 +184,7 @@ export default {
:is-first-note="true"
:note="note"
:discussion-id="discussionId"
:full-path="fullPath"
:has-replies="hasReplies"
:work-item-type="workItemType"
:is-modal="isModal"
@ -207,6 +212,7 @@ export default {
<work-item-note
:key="threadKey(reply)"
:discussion-id="discussionId"
:full-path="fullPath"
:note="reply"
:work-item-type="workItemType"
:is-modal="isModal"
@ -231,6 +237,7 @@ export default {
v-if="shouldShowReplyForm"
:notes-form="false"
:autofocus="autofocus"
:full-path="fullPath"
:work-item-id="workItemId"
:work-item-iid="workItemIid"
:discussion-id="discussionId"

View File

@ -36,8 +36,12 @@ export default {
EditedAt,
},
mixins: [Tracking.mixin()],
inject: ['fullPath', 'isGroup'],
inject: ['isGroup'],
props: {
fullPath: {
type: String,
required: true,
},
workItemId: {
type: String,
required: true,
@ -338,6 +342,7 @@ export default {
</note-header>
<div class="gl-display-inline-flex">
<note-actions
:full-path="fullPath"
:show-award-emoji="hasAwardEmojiPermission"
:work-item-iid="workItemIid"
:note="note"
@ -375,7 +380,12 @@ export default {
/>
</div>
<div class="note-awards" :class="isFirstNote ? '' : 'gl-pl-7'">
<work-item-note-awards-list :note="note" :work-item-iid="workItemIid" :is-modal="isModal" />
<work-item-note-awards-list
:full-path="fullPath"
:note="note"
:work-item-iid="workItemIid"
:is-modal="isModal"
/>
</div>
</div>
</timeline-entry-item>

View File

@ -33,8 +33,11 @@ export default {
directives: {
GlTooltip: GlTooltipDirective,
},
inject: ['fullPath'],
props: {
fullPath: {
type: String,
required: true,
},
workItemIid: {
type: String,
required: true,

View File

@ -8,8 +8,11 @@ export default {
components: {
AwardsList,
},
inject: ['fullPath'],
props: {
fullPath: {
type: String,
required: true,
},
workItemIid: {
type: String,
required: true,

View File

@ -364,6 +364,7 @@ export default {
<work-item-discussion
:key="getDiscussionKey(discussion)"
:discussion="discussion.notes.nodes"
:full-path="fullPath"
:work-item-id="workItemId"
:work-item-iid="workItemIid"
:work-item-type="workItemType"

View File

@ -11,24 +11,23 @@ module Resolvers
@requires_argument = true
end
def self.requires_argument?
!!@requires_argument
end
def self.calls_gitaly!
@calls_gitaly = true
end
# This is a flag to allow us to use `complexity_multiplier` to compute complexity for connection
# fields(see BaseField#connection_complexity_multiplier) in resolvers that do external connection pagination,
# thus disabling the default `connection` option(see self.field_options method above).
def self.calculate_ext_conn_complexity
false
def self.calls_gitaly?
!!@calls_gitaly
end
def self.field_options
extra_options = {
requires_argument: @requires_argument,
calls_gitaly: @calls_gitaly
}.compact
super.merge(extra_options)
# This is a flag to allow us to use `complexity_multiplier` to compute complexity for connection
# fields(see BaseField#connection_complexity_multiplier) in resolvers that do external connection pagination,
# thus disabling the default `connection` option.
def self.calculate_ext_conn_complexity
false
end
def self.singular_type

View File

@ -7,7 +7,6 @@ module Types
attr_reader :doc_reference
def initialize(*args, **kwargs, &block)
init_gitlab_deprecation(kwargs)
@doc_reference = kwargs.delete(:see)
# our custom addition `nullable` which allows us to declare

View File

@ -5,12 +5,6 @@ module Types
class BaseEnum < GraphQL::Schema::Enum
class CustomValue < GraphQL::Schema::EnumValue
include Gitlab::Graphql::Deprecations
def initialize(name, desc = nil, **kwargs)
init_gitlab_deprecation(kwargs)
super(name, desc, **kwargs)
end
end
enum_value_class(CustomValue)

View File

@ -11,13 +11,15 @@ module Types
attr_reader :doc_reference
def initialize(**kwargs, &block)
init_gitlab_deprecation(kwargs)
@calls_gitaly = !!kwargs.delete(:calls_gitaly)
@requires_argument = kwargs.delete(:requires_argument)
@calls_gitaly = kwargs.delete(:calls_gitaly)
@doc_reference = kwargs.delete(:see)
@constant_complexity = kwargs[:complexity].is_a?(Integer) && kwargs[:complexity] > 0
@requires_argument = !!kwargs.delete(:requires_argument)
given_complexity = kwargs[:complexity] || kwargs[:resolver_class].try(:complexity)
@constant_complexity = given_complexity.is_a?(Integer) && given_complexity > 0
kwargs[:complexity] = field_complexity(kwargs[:resolver_class], given_complexity)
@authorize = Array.wrap(kwargs.delete(:authorize))
kwargs[:complexity] = field_complexity(kwargs[:resolver_class], kwargs[:complexity])
after_connection_extensions = kwargs.delete(:late_extensions) || []
super(**kwargs, &block)
@ -31,11 +33,12 @@ module Types
end
def may_call_gitaly?
@constant_complexity || @calls_gitaly
@constant_complexity || calls_gitaly?
end
def requires_argument?
@requires_argument || arguments.values.any? { |argument| argument.type.non_null? }
value = @requires_argument.nil? ? @resolver_class.try(:requires_argument?) : @requires_argument
!!value || arguments.values.any? { |argument| argument.type.non_null? }
end
# By default fields authorize against the current object, but that is not how our
@ -82,7 +85,7 @@ module Types
end
def calls_gitaly?
@calls_gitaly
!!(@calls_gitaly.nil? ? @resolver_class.try(:calls_gitaly?) : @calls_gitaly)
end
def constant_complexity?

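As a side note on the change above, a minimal sketch of the behaviour it enables (the resolver and field names here are hypothetical, not from this commit): the `calls_gitaly`/`requires_argument` flags can now live on the resolver class, and `BaseField` falls back to them instead of requiring the keywords on every field definition.

class Resolvers::ExampleResolver < Resolvers::BaseResolver
  requires_argument!
  calls_gitaly!
end

# A field using that resolver no longer needs `calls_gitaly: true` or
# `requires_argument: true`:
#
#   field :example, Types::ExampleType, null: true, resolver: Resolvers::ExampleResolver
#
# field.calls_gitaly?      # => true, via resolver_class.try(:calls_gitaly?)
# field.requires_argument? # => true, via resolver_class.try(:requires_argument?)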
View File

@ -166,3 +166,5 @@ class Ability
end
end
end
Ability.prepend_mod_with('AbilityPrepend')

View File

@ -11,6 +11,8 @@ module WorkItems
belongs_to :source, class_name: 'WorkItem'
belongs_to :target, class_name: 'WorkItem'
validate :validate_related_link_restrictions
class << self
extend ::Gitlab::Utils::Override
@ -25,5 +27,40 @@ module WorkItems
'work item'
end
end
private
def validate_related_link_restrictions
return unless source && target
source_type = source.work_item_type
target_type = target.work_item_type
return if link_restriction_exists?(source_type.id, target_type.id)
errors.add :source, format(
s_('%{source_type} cannot be related to %{type_type}'),
source_type: source_type.name.downcase.pluralize,
type_type: target_type.name.downcase.pluralize
)
end
def link_restriction_exists?(source_type_id, target_type_id)
source_restriction = find_restriction(source_type_id, target_type_id)
return true if source_restriction.present?
return false if source_type_id == target_type_id
find_restriction(target_type_id, source_type_id).present?
end
def find_restriction(source_type_id, target_type_id)
::WorkItems::RelatedLinkRestriction.find_by_source_type_id_and_target_type_id_and_link_type(
source_type_id,
target_type_id,
link_type
)
end
end
end
WorkItems::RelatedWorkItemLink.prepend_mod
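
To illustrate the validation added above (the work item types and the absence of a restriction row are assumed for the example, not asserted by this commit):

# Assuming no WorkItems::RelatedLinkRestriction exists for this pair of types
# in either direction, the link is rejected with the translated message:
link = WorkItems::RelatedWorkItemLink.new(source: incident_work_item, target: task_work_item)
link.valid?          # => false
link.errors[:source] # => ["incidents cannot be related to tasks"]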

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true

class AddDuplicateIndexRuleTypeFourAndApplicableColumn < Gitlab::Database::Migration[2.1]
  INDEX_NAME = 'unique_any_approver_merge_request_rule_type_post_merge'

  disable_ddl_transaction!

  def up
    add_concurrent_index :approval_merge_request_rules, [:merge_request_id, :rule_type, :applicable_post_merge],
      where: 'rule_type = 4', name: INDEX_NAME, unique: true
  end

  def down
    remove_concurrent_index_by_name :approval_merge_request_rules, INDEX_NAME
  end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true

class RemoveTempIndexForProjectStatisticsPipelineArtifactsSizeMigration < Gitlab::Database::Migration[2.1]
  INDEX_PROJECT_STATSISTICS_PIPELINE_ARTIFACTS_SIZE = 'tmp_index_project_statistics_pipeline_artifacts_size'

  disable_ddl_transaction!

  def up
    remove_concurrent_index_by_name :project_statistics, INDEX_PROJECT_STATSISTICS_PIPELINE_ARTIFACTS_SIZE
  end

  def down
    add_concurrent_index :project_statistics, [:project_id],
      name: INDEX_PROJECT_STATSISTICS_PIPELINE_ARTIFACTS_SIZE,
      where: "pipeline_artifacts_size != 0"
  end
end

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true

class SwapColumnsForCiPipelineVariablesPipelineIdBigint < Gitlab::Database::Migration[2.1]
  include ::Gitlab::Database::MigrationHelpers::Swapping

  disable_ddl_transaction!

  def up
    swap
  end

  def down
    swap
  end

  private

  def swap
    # rubocop:disable Migration/WithLockRetriesDisallowedMethod
    with_lock_retries(raise_on_exhaustion: true) do
      lock_tables(:ci_pipelines, :ci_pipeline_variables)

      swap_columns(
        :ci_pipeline_variables,
        :pipeline_id,
        :pipeline_id_convert_to_bigint
      )
      reset_trigger_function(:trigger_7f3d66a7d7f5)
      swap_columns_default(
        :ci_pipeline_variables,
        :pipeline_id,
        :pipeline_id_convert_to_bigint
      )
      swap_foreign_keys(
        :ci_pipeline_variables,
        :fk_f29c5f4380,
        :temp_fk_rails_8d3b04e3e1
      )
      swap_indexes(
        :ci_pipeline_variables,
        :index_ci_pipeline_variables_on_pipeline_id_and_key,
        :index_ci_pipeline_variables_on_pipeline_id_bigint_and_key
      )
    end
    # rubocop:enable Migration/WithLockRetriesDisallowedMethod
  end
end

View File

@ -0,0 +1,18 @@
# frozen_string_literal: true

class AddIndexOnProjectsForAdjournedDeletion < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  INDEX_NAME = 'index_projects_id_for_aimed_for_deletion'

  def up
    add_concurrent_index :projects,
      [:id, :marked_for_deletion_at],
      where: 'marked_for_deletion_at IS NOT NULL AND pending_delete = false',
      name: INDEX_NAME
  end

  def down
    remove_concurrent_index_by_name :projects, INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
55747d0be3472797422175e666d95022b21a1c42b017c1a72de4c8bdcbdba6c4

View File

@ -0,0 +1 @@
df947a6c033aaf3212743fc9e9d1b82c84d055618f211553422fbdbf839a41a6

View File

@ -0,0 +1 @@
1cda91e874cd6e0eb26f641ba00fce8afa7236f3008192711b4479e3ee7bb91b

View File

@ -0,0 +1 @@
e0c5245e0a7ce9daa5ffb609fb1d9f5b19f438320924d989011fcccc31b2594e

View File

@ -13886,12 +13886,12 @@ CREATE TABLE ci_pipeline_variables (
encrypted_value text,
encrypted_value_salt character varying,
encrypted_value_iv character varying,
pipeline_id integer NOT NULL,
pipeline_id_convert_to_bigint integer DEFAULT 0 NOT NULL,
variable_type smallint DEFAULT 1 NOT NULL,
partition_id bigint NOT NULL,
raw boolean DEFAULT false NOT NULL,
id bigint NOT NULL,
pipeline_id_convert_to_bigint bigint DEFAULT 0 NOT NULL
pipeline_id bigint NOT NULL
);
CREATE SEQUENCE ci_pipeline_variables_id_seq
@ -33760,6 +33760,8 @@ CREATE INDEX index_projects_api_vis20_path ON projects USING btree (path, id) WH
CREATE INDEX index_projects_api_vis20_updated_at ON projects USING btree (updated_at, id) WHERE (visibility_level = 20);
CREATE INDEX index_projects_id_for_aimed_for_deletion ON projects USING btree (id, marked_for_deletion_at) WHERE ((marked_for_deletion_at IS NOT NULL) AND (pending_delete = false));
CREATE INDEX index_projects_not_aimed_for_deletion ON projects USING btree (id) WHERE (marked_for_deletion_at IS NULL);
CREATE INDEX index_projects_on_created_at_and_id ON projects USING btree (created_at, id);
@ -34932,8 +34934,6 @@ CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING
CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statistics USING btree (project_id) WHERE (container_registry_size = 0);
CREATE INDEX tmp_index_project_statistics_pipeline_artifacts_size ON project_statistics USING btree (project_id) WHERE (pipeline_artifacts_size <> 0);
CREATE INDEX tmp_index_project_statistics_updated_at ON project_statistics USING btree (project_id, updated_at) WHERE (repository_size > 0);
CREATE INDEX tmp_index_vulnerability_dismissal_info ON vulnerabilities USING btree (id) WHERE ((state = 2) AND ((dismissed_at IS NULL) OR (dismissed_by_id IS NULL)));
@ -34968,6 +34968,8 @@ CREATE UNIQUE INDEX unique_amazon_s3_configurations_namespace_id_and_bucket_name
CREATE UNIQUE INDEX unique_amazon_s3_configurations_namespace_id_and_name ON audit_events_amazon_s3_configurations USING btree (namespace_id, name);
CREATE UNIQUE INDEX unique_any_approver_merge_request_rule_type_post_merge ON approval_merge_request_rules USING btree (merge_request_id, rule_type, applicable_post_merge) WHERE (rule_type = 4);
CREATE UNIQUE INDEX unique_batched_background_migrations_queued_migration_version ON batched_background_migrations USING btree (queued_migration_version);
CREATE UNIQUE INDEX unique_ci_builds_token_encrypted_and_partition_id ON ci_builds USING btree (token_encrypted, partition_id) WHERE (token_encrypted IS NOT NULL);

View File

@ -68,7 +68,7 @@ module Gitlab
end
def provider_config
Gitlab::Auth::OAuth::Provider.config_for(@provider) || {}
Gitlab::Auth::OAuth::Provider.config_for(provider) || {}
end
def provider_args

View File

@ -1209,6 +1209,10 @@ into similar problems in the future (e.g. when new tables are created).
end
end
def lock_tables(*tables, mode: :access_exclusive)
execute("LOCK TABLE #{tables.join(', ')} IN #{mode.to_s.upcase.tr('_', ' ')} MODE")
end
private
def multiple_columns(columns, separator: ', ')

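A brief usage sketch of the new `lock_tables` helper (the migration class below is hypothetical); the default mode is `:access_exclusive`, and the mode symbol is upcased with underscores turned into spaces when the LOCK TABLE statement is built:

class ExampleLockTablesMigration < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  def up
    with_lock_retries do
      # Emits: LOCK TABLE ci_builds, ci_pipelines IN ACCESS EXCLUSIVE MODE
      lock_tables(:ci_builds, :ci_pipelines)

      # ... DDL that needs both tables locked ...
    end
  end

  def down
    # no-op
  end
end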
View File

@ -0,0 +1,50 @@
# frozen_string_literal: true

module Gitlab
  module Database
    module MigrationHelpers
      module Swapping
        def reset_trigger_function(function_name)
          execute("ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL")
        end

        def swap_columns(table, column1, column2)
          ::Gitlab::Database::Migrations::SwapColumns.new(
            migration_context: self,
            table: table,
            column1: column1,
            column2: column2
          ).execute
        end

        def swap_columns_default(table, column1, column2)
          ::Gitlab::Database::Migrations::SwapColumnsDefault.new(
            migration_context: self,
            table: table,
            column1: column1,
            column2: column2
          ).execute
        end

        def swap_foreign_keys(table, foreign_key1, foreign_key2)
          rename_constraint(table, foreign_key1, :temp_name_for_renaming)
          rename_constraint(table, foreign_key2, foreign_key1)
          rename_constraint(table, :temp_name_for_renaming, foreign_key2)
        end

        def swap_indexes(table, index1, index2)
          identifier = "index_#{index1}_on_#{table}"
          # Check Gitlab::Database::MigrationHelpers#concurrent_foreign_key_name()
          # for info on why we use a hash
          hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
          temp_index = "temp_#{hashed_identifier}"

          rename_index(table, index1, temp_index)
          rename_index(table, index2, index1)
          rename_index(table, temp_index, index2)
        end
      end
    end
  end
end

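As an aside on `swap_indexes` above, the temporary index name is derived from a SHA256 hash, so it stays well under PostgreSQL's 63-byte identifier limit no matter how long the original index and table names are. A plain-Ruby equivalent of that derivation, using the names from the migration earlier in this commit:

require 'digest'

identifier = "index_index_ci_pipeline_variables_on_pipeline_id_and_key_on_ci_pipeline_variables"
# `[0, 10]` mirrors the ActiveSupport `first(10)` call used in the helper
hashed_identifier = Digest::SHA256.hexdigest(identifier)[0, 10]
temp_index = "temp_#{hashed_identifier}"
temp_index.bytesize # => 15, regardless of the length of `identifier`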
View File

@ -0,0 +1,39 @@
# frozen_string_literal: true

module Gitlab
  module Database
    module Migrations
      class SwapColumns
        delegate :quote_table_name, :quote_column_name, :clear_cache!, to: :@migration_context

        def initialize(migration_context:, table:, column1:, column2:)
          @migration_context = migration_context
          @table = table
          @column_name1 = column1
          @column_name2 = column2
        end

        def execute
          rename_column(@table, @column_name1, :temp_name_for_renaming)
          rename_column(@table, @column_name2, @column_name1)
          rename_column(@table, :temp_name_for_renaming, @column_name2)
        end

        private

        # Rails' `rename_column` will rename related indexes
        # using a format e.g. `index_{TABLE_NAME}_on_{KEY1}_and_{KEY2}`
        # This will break the migration if the formatted index name is longer than 63 chars, e.g.
        # `index_ci_pipeline_variables_on_pipeline_id_convert_to_bigint_and_key`
        # Therefore, we need to duplicate what Rails has done here without the part renaming related indexes
        def rename_column(table_name, column_name, column2_name)
          clear_cache!
          @migration_context.execute <<~SQL
            ALTER TABLE #{quote_table_name(table_name)}
            RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(column2_name)}
          SQL
        end
      end
    end
  end
end

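To make the comment's 63-character concern concrete: the formatted index name it cites is already past PostgreSQL's 63-byte identifier limit, which is why the class issues a raw ALTER TABLE ... RENAME COLUMN instead of Rails' `rename_column` (a quick plain-Ruby check):

name = 'index_ci_pipeline_variables_on_pipeline_id_convert_to_bigint_and_key'
name.bytesize # => 68, over PostgreSQL's 63-byte identifier limit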
View File

@ -0,0 +1,62 @@
# frozen_string_literal: true

module Gitlab
  module Database
    module Migrations
      class SwapColumnsDefault
        delegate(
          :change_column_default, :quote_table_name, :quote_column_name, :column_for,
          to: :@migration_context
        )

        def initialize(migration_context:, table:, column1:, column2:)
          @migration_context = migration_context
          @table = table
          @column_name1 = column1
          @column_name2 = column2
        end

        def execute
          default1 = find_default_by(@column_name1)
          default2 = find_default_by(@column_name2)

          return if default1 == default2

          change_sequence_owner_if(default1[:sequence_name], @column_name2)
          change_sequence_owner_if(default2[:sequence_name], @column_name1)
          change_column_default(@table, @column_name1, default2[:default])
          change_column_default(@table, @column_name2, default1[:default])
        end

        private

        def change_sequence_owner_if(sequence_name, column_name)
          return if sequence_name.blank?

          @migration_context.execute(<<~SQL.squish)
            ALTER SEQUENCE #{quote_table_name(sequence_name)}
            OWNED BY #{quote_table_name(@table)}.#{quote_column_name(column_name)}
          SQL
        end

        def find_default_by(name)
          column = column_for(@table, name)

          if column.default_function.present?
            {
              default: -> { column.default_function },
              sequence_name: extract_sequence_name_from(column.default_function)
            }
          else
            {
              default: column.default
            }
          end
        end

        def extract_sequence_name_from(expression)
          expression[/nextval\('([^']+)'/, 1]
        end
      end
    end
  end
end

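A small illustration of what `extract_sequence_name_from` captures from a column's default function (the expression matches the one exercised in the specs added later in this commit):

expression = "nextval('_test_swap_columns_and_defaults_seq'::regclass)"
expression[/nextval\('([^']+)'/, 1] # => "_test_swap_columns_and_defaults_seq"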
View File

@ -46,7 +46,7 @@ module Gitlab
end
def after_resolve(value:, context:, **rest)
return value if value.is_a?(GraphQL::Execution::Execute::Skip)
return value if value.is_a?(GraphQL::Execution::Skip)
if @field.connection?
redact_connection(value, context)

View File

@ -11,6 +11,14 @@ module Gitlab
attr_accessor :deprecation
end
def initialize(*args, **kwargs, &block)
init_gitlab_deprecation(kwargs)
super
update_deprecation_description
end
def visible?(ctx)
super && ctx[:remove_deprecated] == true ? deprecation.nil? : true
end
@ -37,7 +45,12 @@ module Gitlab
end
kwargs[:deprecation_reason] = deprecation.deprecation_reason
kwargs[:description] = deprecation.edit_description(kwargs[:description])
end
def update_deprecation_description
return if deprecation.nil?
description(deprecation.edit_description(description))
end
end
end

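For context, a hedged sketch of the keyword this concern consumes when a field, argument, or enum value is declared (the field name, description, and milestone are illustrative): `init_gitlab_deprecation` extracts the `deprecated:` hash, and `update_deprecation_description` folds the resulting notice into the description.

field :example_field, GraphQL::Types::String,
  null: true,
  description: 'An example field.',
  deprecated: { reason: 'Use `otherField` instead', milestone: '16.6' }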
View File

@ -1134,6 +1134,12 @@ msgstr ""
msgid "%{source_project_path}:%{source_branch}"
msgstr ""
msgid "%{source_type} cannot be related to %{type_type}"
msgstr ""
msgid "%{source_type} cannot block %{type_type}"
msgstr ""
msgid "%{source} %{copyButton} into %{target}"
msgstr ""
@ -5488,6 +5494,9 @@ msgstr ""
msgid "Analytics|Custom dashboards"
msgstr ""
msgid "Analytics|Custom events"
msgstr ""
msgid "Analytics|Dashboard not found"
msgstr ""
@ -5530,6 +5539,12 @@ msgstr ""
msgid "Analytics|Error while saving visualization."
msgstr ""
msgid "Analytics|Event Name"
msgstr ""
msgid "Analytics|Event Props"
msgstr ""
msgid "Analytics|Failed to fetch data"
msgstr ""
@ -5635,6 +5650,12 @@ msgstr ""
msgid "Analytics|Use the visualization designer to create custom visualizations. After you save a visualization, you can add it to a dashboard."
msgstr ""
msgid "Analytics|User Id"
msgstr ""
msgid "Analytics|User Props"
msgstr ""
msgid "Analytics|Users"
msgstr ""
@ -27832,6 +27853,9 @@ msgstr ""
msgid "LearnGitLab|Set up your workspace"
msgstr ""
msgid "LearnGitLab|Start Learning GitLab"
msgstr ""
msgid "LearnGitLab|Start a free trial of GitLab Ultimate"
msgstr ""
@ -50022,9 +50046,18 @@ msgstr ""
msgid "Tracing|Operation"
msgstr ""
msgid "Tracing|Select a service to load suggestions"
msgstr ""
msgid "Tracing|Service"
msgstr ""
msgid "Tracing|Something went wrong while fetching the operations"
msgstr ""
msgid "Tracing|Something went wrong while fetching the services"
msgstr ""
msgid "Tracing|Span Details"
msgstr ""
@ -50148,6 +50181,27 @@ msgstr ""
msgid "Trending"
msgstr ""
msgid "TrialBenefits|Container Scanning"
msgstr ""
msgid "TrialBenefits|Dynamic Applications Security Testing"
msgstr ""
msgid "TrialBenefits|Multi-Level Epics"
msgstr ""
msgid "TrialBenefits|Security Dashboards"
msgstr ""
msgid "TrialBenefits|Static Application Security Testing"
msgstr ""
msgid "TrialBenefits|Suggested Reviewers"
msgstr ""
msgid "TrialBenefits|Vulnerability Management"
msgstr ""
msgid "TrialRegistration|Start GitLab Ultimate free trial"
msgstr ""
@ -50163,6 +50217,9 @@ msgstr ""
msgid "Trials|Compare all plans"
msgstr ""
msgid "Trials|Congrats on starting your 30-day free trial!"
msgstr ""
msgid "Trials|Create a new group to start your GitLab Ultimate trial."
msgstr ""
@ -50172,12 +50229,18 @@ msgstr ""
msgid "Trials|Looking to do more with GitLab?"
msgstr ""
msgid "Trials|Trials benefits"
msgstr ""
msgid "Trials|Upgrade your plan for more security features"
msgstr ""
msgid "Trials|With GitLab Ultimate you can detect and address vulnerabilities in your application."
msgstr ""
msgid "Trials|With GitLab Ultimate, you'll have access to:"
msgstr ""
msgid "Trials|You can apply your trial to a new group or an existing group."
msgstr ""

View File

@ -1,7 +1,7 @@
# frozen_string_literal: true
module QA
RSpec.describe 'Manage', :requires_admin, product_group: :import_and_integrate do
RSpec.describe 'Manage', :github, :requires_admin, product_group: :import_and_integrate do
describe 'GitHub import' do
include_context 'with github import'

View File

@ -4,11 +4,13 @@ import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BoardCard from '~/boards/components/board_card.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
import { inactiveId } from '~/boards/constants';
import selectedBoardItemsQuery from '~/boards/graphql/client/selected_board_items.query.graphql';
import isShowingLabelsQuery from '~/graphql_shared/client/is_showing_labels.query.graphql';
import { mockLabelList, mockIssue, DEFAULT_COLOR } from '../mock_data';
@ -21,9 +23,11 @@ describe('Board card', () => {
Vue.use(VueApollo);
const mockSetActiveBoardItemResolver = jest.fn();
const mockSetSelectedBoardItemsResolver = jest.fn();
const mockApollo = createMockApollo([], {
Mutation: {
setActiveBoardItem: mockSetActiveBoardItemResolver,
setSelectedBoardItems: mockSetSelectedBoardItemsResolver,
},
});
@ -50,6 +54,7 @@ describe('Board card', () => {
provide = {},
stubs = { BoardCardInner },
item = mockIssue,
selectedBoardItems = [],
} = {}) => {
mockApollo.clients.defaultClient.cache.writeQuery({
query: isShowingLabelsQuery,
@ -57,6 +62,12 @@ describe('Board card', () => {
isShowingLabels: true,
},
});
mockApollo.clients.defaultClient.cache.writeQuery({
query: selectedBoardItemsQuery,
data: {
selectedBoardItems,
},
});
wrapper = shallowMountExtended(BoardCard, {
apolloProvider: mockApollo,
@ -140,10 +151,9 @@ describe('Board card', () => {
createStore({
initialState: {
activeId: inactiveId,
selectedBoardItems: [mockIssue],
},
});
mountComponent();
mountComponent({ selectedBoardItems: [mockIssue.id] });
expect(wrapper.classes()).toContain('multi-select');
expect(wrapper.classes()).not.toContain('is-active');
@ -171,13 +181,17 @@ describe('Board card', () => {
window.gon = { features: { boardMultiSelect: true } };
});
it('should call vuex action "multiSelectBoardItem" with correct parameters', async () => {
it('should call setSelectedBoardItemsMutation with correct parameters', async () => {
await multiSelectCard();
expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledTimes(1);
expect(mockActions.toggleBoardItemMultiSelection).toHaveBeenCalledWith(
expect(mockSetSelectedBoardItemsResolver).toHaveBeenCalledTimes(1);
expect(mockSetSelectedBoardItemsResolver).toHaveBeenCalledWith(
expect.any(Object),
mockIssue,
{
itemId: mockIssue.id,
},
expect.anything(),
expect.anything(),
);
});
});
@ -248,6 +262,7 @@ describe('Board card', () => {
it('set active board item on client when clicking on card', async () => {
await selectCard();
await waitForPromises();
expect(mockSetActiveBoardItemResolver).toHaveBeenCalledWith(
{},

View File

@ -420,6 +420,12 @@ describe('BaseToken', () => {
expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('renders `footer` slot when present', () => {
wrapper = createComponent({ slots: { footer: "<div class='custom-footer' />" } });
expect(wrapper.find('.custom-footer').exists()).toBe(true);
});
describe('events', () => {
describe('when activeToken has been selected', () => {
beforeEach(() => {

View File

@ -65,10 +65,10 @@ describe('Work item add note', () => {
[createNoteMutation, mutationHandler],
]),
provide: {
fullPath: 'test-project-path',
isGroup,
},
propsData: {
fullPath: 'test-project-path',
workItemId: id,
workItemIid,
workItemType,

View File

@ -48,6 +48,7 @@ describe('Work item comment form component', () => {
} = {}) => {
wrapper = shallowMount(WorkItemCommentForm, {
propsData: {
fullPath: 'test-project-path',
workItemState,
workItemId,
workItemType,
@ -59,9 +60,6 @@ describe('Work item comment form component', () => {
autocompleteDataSources: {},
isNewDiscussion,
},
provide: {
fullPath: 'test-project-path',
},
directives: {
GlTooltip: createMockDirective('gl-tooltip'),
},

View File

@ -31,10 +31,8 @@ describe('Work Item Discussion', () => {
workItemType = 'Task',
} = {}) => {
wrapper = shallowMount(WorkItemDiscussion, {
provide: {
fullPath: 'gitlab-org',
},
propsData: {
fullPath: 'gitlab-org',
discussion,
workItemId,
workItemIid: '1',

View File

@ -48,6 +48,7 @@ describe('Work Item Note Actions', () => {
} = {}) => {
wrapper = shallowMountExtended(WorkItemNoteActions, {
propsData: {
fullPath: 'gitlab-org',
showReply,
showEdit,
workItemIid: '1',
@ -63,7 +64,6 @@ describe('Work Item Note Actions', () => {
projectName,
},
provide: {
fullPath: 'gitlab-org',
glFeatures: {
workItemsMvc2: true,
},

View File

@ -61,10 +61,8 @@ describe('Work Item Note Awards List', () => {
});
wrapper = shallowMount(WorkItemNoteAwardsList, {
provide: {
fullPath,
},
propsData: {
fullPath,
workItemIid,
note,
isModal: false,

View File

@ -104,10 +104,10 @@ describe('Work Item Note', () => {
} = {}) => {
wrapper = shallowMount(WorkItemNote, {
provide: {
fullPath: 'test-project-path',
isGroup,
},
propsData: {
fullPath: 'test-project-path',
workItemId,
workItemIid: '1',
note,

View File

@ -443,7 +443,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
end
end
describe 'Authorization on GraphQL::Execution::Execute::SKIP' do
describe 'Authorization on GraphQL::Execution::SKIP' do
let(:type) do
type_factory do |type|
type.authorize permission_single
@ -452,7 +452,7 @@ RSpec.describe 'DeclarativePolicy authorization in GraphQL ' do
let(:query_type) do
query_factory do |query|
query.field :item, [type], null: true, resolver: new_resolver(GraphQL::Execution::Execute::SKIP)
query.field :item, [type], null: true, resolver: new_resolver(GraphQL::Execution::SKIP)
end
end

View File

@ -118,18 +118,16 @@ RSpec.describe Resolvers::BaseResolver, feature_category: :api do
end
it 'does not apply the block to the resolver' do
expect(resolver.field_options).to include(
arguments: be_empty
)
expect(resolver.arguments).to be_empty
result = resolve(resolver)
expect(result).to eq([1])
end
it 'applies the block to the single version of the resolver' do
expect(resolver.single.field_options).to include(
arguments: match('foo' => an_instance_of(::Types::BaseArgument))
)
expect(resolver.single.arguments).to match('foo' => an_instance_of(::Types::BaseArgument))
result = resolve(resolver.single, args: { foo: 7 })
expect(result).to eq(49)
@ -155,9 +153,8 @@ RSpec.describe Resolvers::BaseResolver, feature_category: :api do
end
it 'applies both blocks to the single version of the resolver' do
expect(resolver.single.field_options).to include(
arguments: match('foo' => ::Types::BaseArgument, 'bar' => ::Types::BaseArgument)
)
expect(resolver.single.arguments).to match('foo' => ::Types::BaseArgument, 'bar' => ::Types::BaseArgument)
result = resolve(resolver.single, args: { foo: 7, bar: 5 })
expect(result).to eq(35)
@ -178,12 +175,9 @@ RSpec.describe Resolvers::BaseResolver, feature_category: :api do
end
it 'applies both blocks to the single version of the resolver' do
expect(resolver.single.field_options).to include(
arguments: match('foo' => ::Types::BaseArgument)
)
expect(subclass.single.field_options).to include(
arguments: match('foo' => ::Types::BaseArgument, 'inc' => ::Types::BaseArgument)
)
expect(resolver.single.arguments).to match('foo' => ::Types::BaseArgument)
expect(subclass.single.arguments).to match('foo' => ::Types::BaseArgument, 'inc' => ::Types::BaseArgument)
result = resolve(subclass.single, args: { foo: 7, inc: 1 })
expect(result).to eq(64)

View File

@ -10,7 +10,7 @@ RSpec.describe Resolvers::Clusters::AgentsResolver do
end
specify do
expect(described_class.field_options).to include(extras: include(:lookahead))
expect(described_class.extras).to include(:lookahead)
end
describe '#resolve' do
@ -52,17 +52,9 @@ end
RSpec.describe Resolvers::Clusters::AgentsResolver.single do
it { expect(described_class).to be < Resolvers::Clusters::AgentsResolver }
describe '.field_options' do
subject { described_class.field_options }
specify do
expect(subject).to include(
type: ::Types::Clusters::AgentType,
null: true,
extras: [:lookahead]
)
end
end
it { expect(described_class.type).to eq(::Types::Clusters::AgentType) }
it { expect(described_class.null).to eq(true) }
it { expect(described_class.extras).to include(:lookahead) }
describe 'arguments' do
subject { described_class.arguments[argument] }

View File

@ -7,7 +7,7 @@ RSpec.describe Resolvers::Kas::AgentConfigurationsResolver do
it { expect(described_class.type).to eq(Types::Kas::AgentConfigurationType.connection_type) }
it { expect(described_class.null).to be_truthy }
it { expect(described_class.field_options).to include(calls_gitaly: true) }
it { expect(described_class.calls_gitaly?).to eq(true) }
describe '#resolve' do
let_it_be(:project) { create(:project) }

View File

@ -38,12 +38,8 @@ RSpec.describe Resolvers::Projects::ForkTargetsResolver do
end
def resolve_targets(args, opts = {})
field_options = described_class.field_options.merge(
owner: resolver_parent,
name: 'field_value'
).merge(opts)
field = ::Types::BaseField.new(**field_options)
field_options = { owner: resolver_parent, resolver: described_class }.merge(opts)
field = ::Types::BaseField.from_options('field_value', **field_options)
resolve_field(field, project, args: args, ctx: { current_user: user }, object_type: resolver_parent)
end
end

View File

@ -52,8 +52,8 @@ RSpec.describe Subscriptions::IssuableUpdated do
let(:current_user) { unauthorized_user }
it 'unsubscribes the user' do
# GraphQL::Execution::Execute::Skip is returned when unsubscribed
expect(subject).to be_an(GraphQL::Execution::Execute::Skip)
# GraphQL::Execution::Skip is returned when unsubscribed
expect(subject).to be_an(GraphQL::Execution::Skip)
end
end
end

View File

@ -646,7 +646,7 @@ RSpec.describe UsersHelper do
expect(preload_queries).not_to exceed_query_limit(2)
expect(helper_queries).not_to exceed_query_limit(0)
expect(access_queries).not_to exceed_query_limit(0)
expect(access_queries).not_to exceed_query_limit(1)
end
end
end

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Auth::OAuth::AuthHash, feature_category: :user_management do
let(:provider) { 'ldap' }
let(:provider) { 'openid_connect' }
let(:auth_hash) do
described_class.new(
OmniAuth::AuthHash.new(
@ -19,7 +19,6 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash, feature_category: :user_management
)
end
let(:provider_config) { { 'args' => { 'gitlab_username_claim' => 'first_name' } } }
let(:uid_raw) do
+"CN=Onur K\xC3\xBC\xC3\xA7\xC3\xBCk,OU=Test,DC=example,DC=net"
end
@ -117,8 +116,17 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash, feature_category: :user_management
end
context 'custom username field provided' do
let(:provider_config) do
GitlabSettings::Options.build(
{
name: provider,
args: { 'gitlab_username_claim' => 'first_name' }
}
)
end
before do
allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for).and_return(provider_config)
stub_omniauth_setting(providers: [provider_config])
end
it 'uses the custom field for the username within info' do

View File

@ -0,0 +1,172 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::MigrationHelpers::Swapping, feature_category: :database do
let(:connection) { ApplicationRecord.connection }
let(:migration_context) do
ActiveRecord::Migration
.new
.extend(described_class)
.extend(Gitlab::Database::MigrationHelpers)
end
let(:service_instance) { instance_double('Gitlab::Database::Migrations::SwapColumns', execute: nil) }
describe '#reset_trigger_function' do
let(:trigger_function_name) { 'existing_trigger_function' }
before do
connection.execute(<<~SQL)
CREATE FUNCTION #{trigger_function_name}() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
NEW."bigint_column" := NEW."integer_column";
RETURN NEW;
END;
$$;
SQL
end
it 'resets' do
recorder = ActiveRecord::QueryRecorder.new do
migration_context.reset_trigger_function(trigger_function_name)
end
expect(recorder.log).to include(/ALTER FUNCTION "existing_trigger_function" RESET ALL/)
end
end
describe '#swap_columns' do
let(:table) { :ci_pipeline_variables }
let(:column1) { :pipeline_id }
let(:column2) { :pipeline_id_convert_to_bigint }
it 'calls service' do
expect(::Gitlab::Database::Migrations::SwapColumns).to receive(:new).with(
migration_context: migration_context,
table: table,
column1: column1,
column2: column2
).and_return(service_instance)
migration_context.swap_columns(table, column1, column2)
end
end
describe '#swap_columns_default' do
let(:table) { :_test_table }
let(:column1) { :pipeline_id }
let(:column2) { :pipeline_id_convert_to_bigint }
it 'calls service' do
expect(::Gitlab::Database::Migrations::SwapColumnsDefault).to receive(:new).with(
migration_context: migration_context,
table: table,
column1: column1,
column2: column2
).and_return(service_instance)
migration_context.swap_columns_default(table, column1, column2)
end
end
describe '#swap_foreign_keys' do
let(:table) { :_test_swap_foreign_keys }
let(:referenced_table) { "#{table}_referenced" }
let(:foreign_key1) { :fkey_on_integer_column }
let(:foreign_key2) { :fkey_on_bigint_column }
before do
connection.execute(<<~SQL)
CREATE TABLE #{table} (
integer_column integer NOT NULL,
bigint_column bigint DEFAULT 0 NOT NULL
);
CREATE TABLE #{referenced_table} (
id bigint NOT NULL
);
ALTER TABLE ONLY #{referenced_table}
ADD CONSTRAINT pk PRIMARY KEY (id);
ALTER TABLE ONLY #{table}
ADD CONSTRAINT #{foreign_key1}
FOREIGN KEY (integer_column) REFERENCES #{referenced_table}(id) ON DELETE SET NULL;
ALTER TABLE ONLY #{table}
ADD CONSTRAINT #{foreign_key2}
FOREIGN KEY (bigint_column) REFERENCES #{referenced_table}(id) ON DELETE SET NULL;
SQL
end
shared_examples_for 'swapping foreign keys correctly' do
specify do
expect { migration_context.swap_foreign_keys(table, foreign_key1, foreign_key2) }
.to change {
find_foreign_key_by(foreign_key1).options[:column]
}.from('integer_column').to('bigint_column')
.and change {
find_foreign_key_by(foreign_key2).options[:column]
}.from('bigint_column').to('integer_column')
end
end
it_behaves_like 'swapping foreign keys correctly'
context 'when foreign key names are 63 bytes' do
let(:foreign_key1) { :f1_012345678901234567890123456789012345678901234567890123456789 }
let(:foreign_key2) { :f2_012345678901234567890123456789012345678901234567890123456789 }
it_behaves_like 'swapping foreign keys correctly'
end
private
def find_foreign_key_by(name)
connection.foreign_keys(table).find { |k| k.options[:name].to_s == name.to_s }
end
end
describe '#swap_indexes' do
let(:table) { :_test_swap_indexes }
let(:index1) { :index_on_integer }
let(:index2) { :index_on_bigint }
before do
connection.execute(<<~SQL)
CREATE TABLE #{table} (
integer_column integer NOT NULL,
bigint_column bigint DEFAULT 0 NOT NULL
);
CREATE INDEX #{index1} ON #{table} USING btree (integer_column);
CREATE INDEX #{index2} ON #{table} USING btree (bigint_column);
SQL
end
shared_examples_for 'swapping indexes correctly' do
specify do
expect { migration_context.swap_indexes(table, index1, index2) }
.to change { find_index_by(index1).columns }.from(['integer_column']).to(['bigint_column'])
.and change { find_index_by(index2).columns }.from(['bigint_column']).to(['integer_column'])
end
end
it_behaves_like 'swapping indexes correctly'
context 'when index names are 63 bytes' do
let(:index1) { :i1_012345678901234567890123456789012345678901234567890123456789 }
let(:index2) { :i2_012345678901234567890123456789012345678901234567890123456789 }
it_behaves_like 'swapping indexes correctly'
end
private
def find_index_by(name)
connection.indexes(table).find { |c| c.name == name.to_s }
end
end
end

View File

@ -2935,4 +2935,20 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
it { expect(recorder.log).to be_empty }
end
end
describe '#lock_tables' do
let(:lock_statement) do
/LOCK TABLE ci_builds, ci_pipelines IN ACCESS EXCLUSIVE MODE/
end
subject(:recorder) do
ActiveRecord::QueryRecorder.new do
model.lock_tables(:ci_builds, :ci_pipelines)
end
end
it 'locks the tables' do
expect(recorder.log).to include(lock_statement)
end
end
end

View File

@ -0,0 +1,118 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::SwapColumnsDefault, feature_category: :database do
describe '#execute' do
let(:connection) { ApplicationRecord.connection }
let(:migration_context) do
Gitlab::Database::Migration[2.1]
.new('name', 'version')
.extend(Gitlab::Database::MigrationHelpers::Swapping)
end
let(:table) { :_test_swap_columns_and_defaults }
let(:column1) { :integer_column }
let(:column2) { :bigint_column }
subject(:execute_service) do
described_class.new(
migration_context: migration_context,
table: table,
column1: column1,
column2: column2
).execute
end
before do
connection.execute(sql)
end
context 'when defaults are static values' do
let(:sql) do
<<~SQL
CREATE TABLE #{table} (
id integer NOT NULL,
#{column1} integer DEFAULT 8 NOT NULL,
#{column2} bigint DEFAULT 100 NOT NULL
);
SQL
end
it 'swaps the default correctly' do
expect { execute_service }
.to change { find_column_by(column1).default }.to('100')
.and change { find_column_by(column2).default }.to('8')
.and not_change { find_column_by(column1).default_function }.from(nil)
.and not_change { find_column_by(column2).default_function }.from(nil)
end
end
context 'when default is sequence' do
let(:sql) do
<<~SQL
CREATE TABLE #{table} (
id integer NOT NULL,
#{column1} integer NOT NULL,
#{column2} bigint DEFAULT 100 NOT NULL
);
CREATE SEQUENCE #{table}_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE #{table}_seq OWNED BY #{table}.#{column1};
ALTER TABLE ONLY #{table} ALTER COLUMN #{column1} SET DEFAULT nextval('#{table}_seq'::regclass);
SQL
end
it 'swaps the default correctly' do
recorder = nil
expect { recorder = ActiveRecord::QueryRecorder.new { execute_service } }
.to change { find_column_by(column1).default }.to('100')
.and change { find_column_by(column1).default_function }.to(nil)
.and change { find_column_by(column2).default }.to(nil)
.and change {
find_column_by(column2).default_function
}.to("nextval('_test_swap_columns_and_defaults_seq'::regclass)")
expect(recorder.log).to include(
/SEQUENCE "_test_swap_columns_and_defaults_seq" OWNED BY "_test_swap_columns_and_defaults"."bigint_column"/
)
expect(recorder.log).to include(
/COLUMN "bigint_column" SET DEFAULT nextval\('_test_swap_columns_and_defaults_seq'::regclass\)/
)
end
end
context 'when defaults are the same' do
let(:sql) do
<<~SQL
CREATE TABLE #{table} (
id integer NOT NULL,
#{column1} integer DEFAULT 100 NOT NULL,
#{column2} bigint DEFAULT 100 NOT NULL
);
SQL
end
it 'does nothing' do
recorder = nil
expect { recorder = ActiveRecord::QueryRecorder.new { execute_service } }
.to not_change { find_column_by(column1).default }
.and not_change { find_column_by(column1).default_function }
.and not_change { find_column_by(column2).default }
.and not_change { find_column_by(column2).default_function }
expect(recorder.log).not_to include(/ALTER TABLE/)
end
end
private
def find_column_by(name)
connection.columns(table).find { |c| c.name == name.to_s }
end
end
end
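For orientation, this is the call shape the spec drives, written as it would appear inside a migration (a sketch mirroring the subject above; `self` is assumed to be a Gitlab::Database::Migration[2.1] instance with the Swapping helpers mixed in, as the spec's migration_context is):
Gitlab::Database::Migrations::SwapColumnsDefault.new(
  migration_context: self,
  table: :_test_swap_columns_and_defaults,
  column1: :integer_column,
  column2: :bigint_column
).execute
# Afterwards integer_column defaults to 100 and bigint_column to 8, and an owning
# sequence (if any) is re-pointed at the other column, as asserted above.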

View File

@ -0,0 +1,64 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::SwapColumns, feature_category: :database do
describe '#execute' do
let(:connection) { ApplicationRecord.connection }
let(:sql) do
<<~SQL
CREATE TABLE #{table} (
id integer NOT NULL,
#{column1} integer DEFAULT 8 NOT NULL,
#{column2} bigint DEFAULT 100 NOT NULL
);
SQL
end
let(:migration_context) do
Gitlab::Database::Migration[2.1]
.new('name', 'version')
.extend(Gitlab::Database::MigrationHelpers::Swapping)
end
let(:table) { :_test_swap_columns_and_defaults }
let(:column1) { :integer_column }
let(:column2) { :bigint_column }
subject(:execute_service) do
described_class.new(
migration_context: migration_context,
table: table,
column1: column1,
column2: column2
).execute
end
before do
connection.execute(sql)
end
shared_examples_for 'swapping columns correctly' do
specify do
expect { execute_service }
.to change { find_column_by(column1).sql_type }.from('integer').to('bigint')
.and change { find_column_by(column2).sql_type }.from('bigint').to('integer')
end
end
it_behaves_like 'swapping columns correctly'
context 'when column names are 63 bytes' do
let(:column1) { :int012345678901234567890123456789012345678901234567890123456789 }
let(:column2) { :big012345678901234567890123456789012345678901234567890123456789 }
it_behaves_like 'swapping columns correctly'
end
private
def find_column_by(name)
connection.columns(table).find { |c| c.name == name.to_s }
end
end
end

View File

@ -8,10 +8,9 @@ RSpec.describe Gitlab::Graphql::Timeout do
end
it 'sends the error to our GraphQL logger' do
parent_type = double(graphql_name: 'parent_type')
field = double(graphql_name: 'field')
field = double(path: 'parent_type.field')
query = double(query_string: 'query_string', provided_variables: 'provided_variables')
error = GraphQL::Schema::Timeout::TimeoutError.new(parent_type, field)
error = GraphQL::Schema::Timeout::TimeoutError.new(field)
expect(Gitlab::GraphqlLogger)
.to receive(:error)

View File

@ -88,10 +88,12 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
describe '.save_aggregated_metrics' do
subject(:save_aggregated_metrics) do
described_class.save_aggregated_metrics(metric_name: metric_1,
time_period: time_period,
recorded_at_timestamp: recorded_at,
data: data)
described_class.save_aggregated_metrics(
metric_name: metric_1,
time_period: time_period,
recorded_at_timestamp: recorded_at,
data: data
)
end
context 'with compatible data argument' do

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountCiInternalPipelinesMetric,
feature_category: :service_ping do
feature_category: :service_ping do
let_it_be(:ci_pipeline_1) { create(:ci_pipeline, source: :external, created_at: 3.days.ago) }
let_it_be(:ci_pipeline_2) { create(:ci_pipeline, source: :push, created_at: 3.days.ago) }
let_it_be(:old_pipeline) { create(:ci_pipeline, source: :push, created_at: 2.months.ago) }

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::CountIssuesCreatedManuallyFromAlertsMetric,
feature_category: :service_ping do
feature_category: :service_ping do
let_it_be(:issue) { create(:issue) }
let_it_be(:issue_with_alert) { create(:issue, :with_alert) }

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::IncomingEmailEncryptedSecretsEnabledMetric,
feature_category: :service_ping do
feature_category: :service_ping do
it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do
let(:expected_value) { ::Gitlab::Email::IncomingEmail.encrypted_secrets.active? }
end

View File

@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Instrumentations::ServiceDeskEmailEncryptedSecretsEnabledMetric,
feature_category: :service_ping do
feature_category: :service_ping do
it_behaves_like 'a correct instrumented metric value', { time_frame: 'none', data_source: 'ruby' } do
let(:expected_value) { ::Gitlab::Email::ServiceDeskEmail.encrypted_secrets.active? }
end

View File

@ -75,9 +75,9 @@ RSpec.describe Gitlab::Usage::Metrics::Query do
describe '.histogram' do
it 'returns the histogram sql' do
expect(described_class.for(:histogram, AlertManagement::HttpIntegration.active,
:project_id, buckets: 1..2, bucket_size: 101))
.to match(/^WITH "count_cte" AS MATERIALIZED/)
expect(described_class.for(
:histogram, AlertManagement::HttpIntegration.active, :project_id, buckets: 1..2, bucket_size: 101
)).to match(/^WITH "count_cte" AS MATERIALIZED/)
end
end

View File

@ -72,17 +72,18 @@ RSpec.describe Gitlab::UsageDataQueries do
describe '.add' do
it 'returns the combined raw SQL with an inner query' do
expect(described_class.add('SELECT COUNT("users"."id") FROM "users"',
'SELECT COUNT("issues"."id") FROM "issues"'))
.to eq('SELECT (SELECT COUNT("users"."id") FROM "users") + (SELECT COUNT("issues"."id") FROM "issues")')
expect(described_class.add(
'SELECT COUNT("users"."id") FROM "users"',
'SELECT COUNT("issues"."id") FROM "issues"'
)).to eq('SELECT (SELECT COUNT("users"."id") FROM "users") + (SELECT COUNT("issues"."id") FROM "issues")')
end
end
describe '.histogram' do
it 'returns the histogram sql' do
expect(described_class.histogram(AlertManagement::HttpIntegration.active,
:project_id, buckets: 1..2, bucket_size: 101))
.to match(/^WITH "count_cte" AS MATERIALIZED/)
expect(described_class.histogram(
AlertManagement::HttpIntegration.active, :project_id, buckets: 1..2, bucket_size: 101
)).to match(/^WITH "count_cte" AS MATERIALIZED/)
end
end

View File

@ -151,8 +151,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
it 'includes accurate usage_activity_by_stage data' do
for_defined_days_back do
user = create(:user)
project = create(:project, :repository_private,
:test_repo, :remote_mirror, creator: user)
project = create(:project, :repository_private, :test_repo, :remote_mirror, creator: user)
create(:merge_request, source_project: project)
create(:deploy_key, user: user)
create(:key, user: user)
@ -830,8 +829,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :servic
it 'gathers Service Desk data' do
create_list(:issue, 2, :confidential, author: Users::Internal.support_bot, project: project)
expect(subject).to eq(service_desk_enabled_projects: 1,
service_desk_issues: 2)
expect(subject).to eq(service_desk_enabled_projects: 1, service_desk_issues: 2)
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe SwapColumnsForCiPipelineVariablesPipelineIdBigint, feature_category: :continuous_integration do
it_behaves_like(
'swap conversion columns',
table_name: :ci_pipeline_variables,
from: :pipeline_id,
to: :pipeline_id_convert_to_bigint
)
end
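The migration file itself is outside this hunk. A hypothetical shape that would satisfy the shared example, assembled only from the services and helpers specced earlier in this commit (the real SwapColumnsForCiPipelineVariablesPipelineIdBigint may differ):
class SwapColumnsForCiPipelineVariablesPipelineIdBigint < Gitlab::Database::Migration[2.1]
  include Gitlab::Database::MigrationHelpers::Swapping # assumed mix-in

  TABLE = :ci_pipeline_variables

  def up
    swap
  end

  def down
    swap
  end

  private

  def swap
    lock_tables(TABLE) # serialize writers while the column types are exchanged
    Gitlab::Database::Migrations::SwapColumns.new(
      migration_context: self, table: TABLE,
      column1: :pipeline_id, column2: :pipeline_id_convert_to_bigint
    ).execute
    Gitlab::Database::Migrations::SwapColumnsDefault.new(
      migration_context: self, table: TABLE,
      column1: :pipeline_id, column2: :pipeline_id_convert_to_bigint
    ).execute
    # Indexes and foreign keys would be re-pointed with swap_indexes /
    # swap_foreign_keys so that, by name, they look unchanged afterwards,
    # which is what the shared example asserts.
  end
end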

View File

@ -30,4 +30,41 @@ RSpec.describe WorkItems::RelatedWorkItemLink, type: :model, feature_category: :
describe '.issuable_name' do
it { expect(described_class.issuable_name).to eq('work item') }
end
describe 'validations' do
describe '#validate_related_link_restrictions' do
using RSpec::Parameterized::TableSyntax
where(:source_type_sym, :target_types, :valid) do
:incident | [:incident, :test_case, :issue, :task, :ticket] | false
:ticket | [:incident, :test_case, :issue, :task, :ticket] | false
:test_case | [:incident, :test_case, :issue, :task, :ticket] | false
:task | [:incident, :test_case, :ticket] | false
:issue | [:incident, :test_case, :ticket] | false
:task | [:task, :issue] | true
:issue | [:task, :issue] | true
end
with_them do
it 'validates the related link' do
target_types.each do |target_type_sym|
source_type = WorkItems::Type.default_by_type(source_type_sym)
target_type = WorkItems::Type.default_by_type(target_type_sym)
source = build(:work_item, work_item_type: source_type, project: project)
target = build(:work_item, work_item_type: target_type, project: project)
link = build(:work_item_link, source: source, target: target)
opposite_link = build(:work_item_link, source: target, target: source)
expect(link.valid?).to eq(valid)
expect(opposite_link.valid?).to eq(valid)
next if valid
expect(link.errors.messages[:source]).to contain_exactly(
"#{source_type.name.downcase.pluralize} cannot be related to #{target_type.name.downcase.pluralize}"
)
end
end
end
end
end
end
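A worked instance of the rule encoded in the table above, using only the APIs this spec already relies on (a sketch, not additional test code):
incident_type = WorkItems::Type.default_by_type(:incident)
ticket_type   = WorkItems::Type.default_by_type(:ticket)

link = build(
  :work_item_link,
  source: build(:work_item, work_item_type: incident_type, project: project),
  target: build(:work_item, work_item_type: ticket_type, project: project)
)

link.valid?          # => false
link.errors[:source] # => ["incidents cannot be related to tickets"]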

View File

@ -239,6 +239,7 @@ RSpec.describe 'GitlabSchema configurations', feature_category: :integrations do
graphql_name 'BarEnum'
value 'FOOBAR', value: 'foobar', deprecated: { milestone: '0.1', reason: :renamed }
value 'FOOBARNEW', value: 'foobarnew'
end)
field :baz, GraphQL::Types::Boolean do

View File

@ -125,7 +125,7 @@ RSpec.describe 'Setting assignees of a merge request', :assume_throttled, featur
context 'when passing append as true' do
let(:mode) { Types::MutationOperationModeEnum.enum[:append] }
let(:input) { { assignee_usernames: [assignee2.username], operation_mode: mode } }
let(:db_query_limit) { 22 }
let(:db_query_limit) { 23 }
before do
# In CE, APPEND is a NOOP as you can't have multiple assignees

View File

@ -8,7 +8,7 @@ RSpec.describe "Add linked items to a work item", feature_category: :portfolio_m
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :private, group: group) }
let_it_be(:reporter) { create(:user).tap { |user| group.add_reporter(user) } }
let_it_be(:project_work_item) { create(:work_item, project: project) }
let_it_be(:project_work_item) { create(:work_item, :issue, project: project) }
let_it_be(:related1) { create(:work_item, project: project) }
let_it_be(:related2) { create(:work_item, project: project) }
@ -116,6 +116,20 @@ RSpec.describe "Add linked items to a work item", feature_category: :portfolio_m
end
end
context 'when type cannot be linked' do
let_it_be(:req) { create(:work_item, :requirement, project: project) }
let(:input) { { 'id' => work_item.to_global_id.to_s, 'workItemsIds' => [req.to_global_id.to_s] } }
it 'returns an error message' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response["errors"]).to eq([
"#{req.to_reference} cannot be added: issues cannot be related to requirements"
])
end
end
context 'when there are more than the max allowed items to link' do
let(:max_work_items) { Mutations::WorkItems::LinkedItems::Base::MAX_WORK_ITEMS }
let(:ids_to_link) { (0..max_work_items).map { |i| "gid://gitlab/WorkItem/#{i}" } }

View File

@ -363,27 +363,6 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
include_examples 'N+1 query check'
end
context 'when requesting participants' do
let(:requested_fields) { 'participants { nodes { name } }' }
before do
create(:award_emoji, :upvote, awardable: merge_request_a)
create(:award_emoji, :upvote, awardable: merge_request_b)
create(:award_emoji, :upvote, awardable: merge_request_c)
note_with_emoji_a = create(:note_on_merge_request, noteable: merge_request_a, project: project)
note_with_emoji_b = create(:note_on_merge_request, noteable: merge_request_b, project: project)
note_with_emoji_c = create(:note_on_merge_request, noteable: merge_request_c, project: project)
create(:award_emoji, :upvote, awardable: note_with_emoji_a)
create(:award_emoji, :upvote, awardable: note_with_emoji_b)
create(:award_emoji, :upvote, awardable: note_with_emoji_c)
end
# Executes 3 extra queries to fetch participant_attrs
include_examples 'N+1 query check', threshold: 3
end
context 'when requesting labels' do
let(:requested_fields) { ['labels { nodes { id } }'] }
@ -425,7 +404,6 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
<<~SELECT
assignees { nodes { username } }
reviewers { nodes { username } }
participants { nodes { username } }
headPipeline { status }
timelogs { nodes { timeSpent } }
SELECT
@ -492,7 +470,6 @@ RSpec.describe 'getting merge request listings nested in a project', feature_cat
a_hash_including(
'assignees' => user_collection,
'reviewers' => user_collection,
'participants' => user_collection,
'headPipeline' => { 'status' => be_present },
'timelogs' => { 'nodes' => be_one }
)))

View File

@ -42,13 +42,6 @@ RSpec.describe 'GraphQL', feature_category: :shared do
post_graphql(query, variables: variables)
end
it 'does not instantiate any query analyzers' do # they are static and re-used
expect(GraphQL::Analysis::QueryComplexity).not_to receive(:new)
expect(GraphQL::Analysis::QueryDepth).not_to receive(:new)
2.times { post_graphql(query, variables: variables) }
end
end
context 'with no variables' do

View File

@ -80,11 +80,11 @@ module GraphqlHelpers
# All resolution goes through fields, so we need to create one here that
# uses our resolver. Thankfully, apart from the field name, resolvers
# contain all the configuration needed to define one.
field_options = resolver_class.field_options.merge(
field = ::Types::BaseField.new(
resolver_class: resolver_class,
owner: resolver_parent,
name: 'field_value'
)
field = ::Types::BaseField.new(**field_options)
# All mutations accept a single `:input` argument. Wrap arguments here.
args = { input: args } if resolver_class <= ::Mutations::BaseMutation && !args.key?(:input)
@ -221,6 +221,7 @@ module GraphqlHelpers
def resolver_instance(resolver_class, obj: nil, ctx: {}, field: nil, schema: GitlabSchema, subscription_update: false)
if ctx.is_a?(Hash)
q = double('Query', schema: schema, subscription_update?: subscription_update, warden: GraphQL::Schema::Warden::PassThruWarden)
allow(q).to receive(:after_lazy) { |value, &block| schema.after_lazy(value, &block) }
ctx = GraphQL::Query::Context.new(query: q, object: obj, values: ctx)
end

View File

@ -32,7 +32,7 @@ RSpec.shared_examples 'a GraphQL type with design fields' do
let(:query) { GraphQL::Query.new(schema) }
let(:context) { query.context }
let(:field) { described_class.fields['image'] }
let(:args) { GraphQL::Query::Arguments::NO_ARGS }
let(:args) { { parent: nil } }
let(:instance) { instantiate(object_id) }
let(:instance_b) { instantiate(object_id_b) }
@ -42,13 +42,12 @@ RSpec.shared_examples 'a GraphQL type with design fields' do
end
def resolve_image(instance)
field.resolve_field(instance, args, context)
field.resolve(instance, args, context)
end
before do
context[:current_user] = current_user
allow(Ability).to receive(:allowed?).with(current_user, :read_design, anything).and_return(true)
allow(context).to receive(:parent).and_return(nil)
end
it 'resolves to the design image URL' do

View File

@ -0,0 +1,117 @@
# frozen_string_literal: true
COLUMN_OPTIONS_TO_REMAIN =
%i[
null
serial?
collation
default
default_function
].freeze
SQL_TYPE_OPTIONS_TO_REMAIN =
%i[
precision
scale
].freeze
SQL_TYPE_OPTIONS_TO_CHANGE =
%i[
type
sql_type
limit
].freeze
RSpec.shared_examples 'swap conversion columns' do |table_name:, from:, to:|
it 'correctly swaps conversion columns' do
before_from_column = before_to_column = before_indexes = before_foreign_keys = nil
after_from_column = after_to_column = after_indexes = after_foreign_keys = nil
expect_column_type_is_changed_but_others_remain_unchanged = -> do
# SQL type is changed
SQL_TYPE_OPTIONS_TO_CHANGE.each do |sql_type_option|
expect(
after_from_column.sql_type_metadata.public_send(sql_type_option)
).to eq(
before_to_column.sql_type_metadata.public_send(sql_type_option)
)
expect(
after_to_column.sql_type_metadata.public_send(sql_type_option)
).to eq(
before_from_column.sql_type_metadata.public_send(sql_type_option)
)
end
# column metadata remains unchanged
COLUMN_OPTIONS_TO_REMAIN.each do |column_option|
expect(
after_from_column.public_send(column_option)
).to eq(
before_from_column.public_send(column_option)
)
expect(
after_to_column.public_send(column_option)
).to eq(
before_to_column.public_send(column_option)
)
end
SQL_TYPE_OPTIONS_TO_REMAIN.each do |sql_type_option|
expect(
after_from_column.sql_type_metadata.public_send(sql_type_option)
).to eq(
before_from_column.sql_type_metadata.public_send(sql_type_option)
)
expect(
after_to_column.sql_type_metadata.public_send(sql_type_option)
).to eq(
before_to_column.sql_type_metadata.public_send(sql_type_option)
)
end
# indexes remain unchanged
expect(before_indexes).to eq(after_indexes)
# foreign keys remain unchanged
expect(before_foreign_keys).to eq(after_foreign_keys)
end
find_column_by = ->(name) do
active_record_base.connection.columns(table_name).find { |c| c.name == name.to_s }
end
find_indexes = -> do
active_record_base.connection.indexes(table_name)
end
find_foreign_keys = -> do
Gitlab::Database::PostgresForeignKey.by_constrained_table_name(table_name)
end
reversible_migration do |migration|
migration.before -> {
before_from_column = find_column_by.call(from)
before_to_column = find_column_by.call(to)
before_indexes = find_indexes
before_foreign_keys = find_foreign_keys
next if after_from_column.nil?
# For migrate down
expect_column_type_is_changed_but_others_remain_unchanged.call
}
migration.after -> {
after_from_column = find_column_by.call(from)
after_to_column = find_column_by.call(to)
after_indexes = find_indexes
after_foreign_keys = find_foreign_keys
expect_column_type_is_changed_but_others_remain_unchanged.call
}
end
end
end

View File

@ -408,28 +408,6 @@ RSpec.shared_examples 'graphql issue list request spec' do
include_examples 'N+1 query check'
end
context 'when requesting participants' do
let(:search_params) { { iids: [issue_a.iid.to_s, issue_c.iid.to_s] } }
let(:requested_fields) { 'participants { nodes { name } }' }
before do
create(:award_emoji, :upvote, awardable: issue_a)
create(:award_emoji, :upvote, awardable: issue_b)
create(:award_emoji, :upvote, awardable: issue_c)
note_with_emoji_a = create(:note_on_issue, noteable: issue_a, project: issue_a.project)
note_with_emoji_b = create(:note_on_issue, noteable: issue_b, project: issue_b.project)
note_with_emoji_c = create(:note_on_issue, noteable: issue_c, project: issue_c.project)
create(:award_emoji, :upvote, awardable: note_with_emoji_a)
create(:award_emoji, :upvote, awardable: note_with_emoji_b)
create(:award_emoji, :upvote, awardable: note_with_emoji_c)
end
# Executes 3 extra queries to fetch participant_attrs
include_examples 'N+1 query check', threshold: 3
end
context 'when requesting labels', :use_sql_query_cache do
let(:requested_fields) { 'labels { nodes { id } }' }
let(:extra_iid_for_second_query) { same_project_issue2.iid.to_s }