Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 6d60f91076
commit 427b23c127
@@ -1 +1 @@
1.10.0
1.11.0
@@ -1,39 +1,92 @@
<script>
import { createNamespacedHelpers, mapState, mapActions } from 'vuex';
import { sprintf, s__ } from '~/locale';
import ClusterFormDropdown from './cluster_form_dropdown.vue';
import RegionDropdown from './region_dropdown.vue';
import RoleNameDropdown from './role_name_dropdown.vue';
import SecurityGroupDropdown from './security_group_dropdown.vue';
import SubnetDropdown from './subnet_dropdown.vue';
import VpcDropdown from './vpc_dropdown.vue';

const { mapState: mapRegionsState, mapActions: mapRegionsActions } = createNamespacedHelpers(
  'regions',
);
const { mapState: mapVpcsState, mapActions: mapVpcActions } = createNamespacedHelpers('vpcs');
const { mapState: mapSubnetsState, mapActions: mapSubnetActions } = createNamespacedHelpers(
  'subnets',
);

export default {
  components: {
    ClusterFormDropdown,
    RegionDropdown,
    RoleNameDropdown,
    SecurityGroupDropdown,
    SubnetDropdown,
    VpcDropdown,
  },
  computed: {
    ...mapState(['selectedRegion']),
    ...mapState(['selectedRegion', 'selectedVpc', 'selectedSubnet']),
    ...mapRegionsState({
      regions: 'items',
      isLoadingRegions: 'isLoadingItems',
      loadingRegionsError: 'loadingItemsError',
    }),
    ...mapVpcsState({
      vpcs: 'items',
      isLoadingVpcs: 'isLoadingItems',
      loadingVpcsError: 'loadingItemsError',
    }),
    ...mapSubnetsState({
      subnets: 'items',
      isLoadingSubnets: 'isLoadingItems',
      loadingSubnetsError: 'loadingItemsError',
    }),
    vpcDropdownDisabled() {
      return !this.selectedRegion;
    },
    subnetDropdownDisabled() {
      return !this.selectedVpc;
    },
    vpcDropdownHelpText() {
      return sprintf(
        s__(
          'ClusterIntegration|Select a VPC to use for your EKS Cluster resources. To use a new VPC, first create one on %{startLink}Amazon Web Services%{endLink}.',
        ),
        {
          startLink:
            '<a href="https://console.aws.amazon.com/vpc/home?#vpc" target="_blank" rel="noopener noreferrer">',
          endLink: '</a>',
        },
        false,
      );
    },
    subnetDropdownHelpText() {
      return sprintf(
        s__(
          'ClusterIntegration|Choose the %{startLink}subnets%{endLink} in your VPC where your worker nodes will run.',
        ),
        {
          startLink:
            '<a href="https://console.aws.amazon.com/vpc/home?#subnets" target="_blank" rel="noopener noreferrer">',
          endLink: '</a>',
        },
        false,
      );
    },
  },
  mounted() {
    this.fetchRegions();
  },
  methods: {
    ...mapActions(['setRegion']),
    ...mapRegionsActions({
      fetchRegions: 'fetchItems',
    }),
    ...mapActions(['setRegion', 'setVpc', 'setSubnet']),
    ...mapRegionsActions({ fetchRegions: 'fetchItems' }),
    ...mapVpcActions({ fetchVpcs: 'fetchItems' }),
    ...mapSubnetActions({ fetchSubnets: 'fetchItems' }),
    setRegionAndFetchVpcs(region) {
      this.setRegion({ region });
      this.fetchVpcs({ region });
    },
    setVpcAndFetchSubnets(vpc) {
      this.setVpc({ vpc });
      this.fetchSubnets({ vpc });
    },
  },
};
</script>
@@ -54,8 +107,52 @@ export default {
        :regions="regions"
        :error="loadingRegionsError"
        :loading="isLoadingRegions"
        @input="setRegion({ region: $event })"
        @input="setRegionAndFetchVpcs($event)"
      />
    </div>
    <div class="form-group">
      <label class="label-bold" name="eks-vpc" for="eks-vpc">{{
        s__('ClusterIntegration|VPC')
      }}</label>
      <cluster-form-dropdown
        field-id="eks-vpc"
        field-name="eks-vpc"
        :input="selectedVpc"
        :items="vpcs"
        :loading="isLoadingVpcs"
        :disabled="vpcDropdownDisabled"
        :disabled-text="s__('ClusterIntegration|Select a region to choose a VPC')"
        :loading-text="s__('ClusterIntegration|Loading VPCs')"
        :placeholder="s__('ClusterIntergation|Select a VPC')"
        :search-field-placeholder="s__('ClusterIntegration|Search VPCs')"
        :empty-text="s__('ClusterIntegration|No VPCs found')"
        :has-errors="loadingVpcsError"
        :error-message="s__('ClusterIntegration|Could not load VPCs for the selected region')"
        @input="setVpcAndFetchSubnets($event)"
      />
      <p class="form-text text-muted" v-html="vpcDropdownHelpText"></p>
    </div>
    <div class="form-group">
      <label class="label-bold" name="eks-subnet" for="eks-subnet">{{
        s__('ClusterIntegration|Subnet')
      }}</label>
      <cluster-form-dropdown
        field-id="eks-subnet"
        field-name="eks-subnet"
        :input="selectedSubnet"
        :items="subnets"
        :loading="isLoadingSubnets"
        :disabled="subnetDropdownDisabled"
        :disabled-text="s__('ClusterIntegration|Select a VPC to choose a subnet')"
        :loading-text="s__('ClusterIntegration|Loading subnets')"
        :placeholder="s__('ClusterIntergation|Select a subnet')"
        :search-field-placeholder="s__('ClusterIntegration|Search subnets')"
        :empty-text="s__('ClusterIntegration|No subnet found')"
        :has-errors="loadingSubnetsError"
        :error-message="s__('ClusterIntegration|Could not load subnets for the selected VPC')"
        @input="setSubnet({ subnet: $event })"
      />
      <p class="form-text text-muted" v-html="subnetDropdownHelpText"></p>
    </div>
  </form>
</template>
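The dropdowns are chained: selecting a region records the choice and triggers the VPC fetch, and selecting a VPC triggers the subnet fetch. The same chain can be driven directly against the store; a minimal sketch, assuming the Vuex store built by the `createStore()` shown further down (the `store` variable and helper function names here are illustrative, not part of the commit):

```javascript
// Sketch only: drive the region → VPC → subnet chain imperatively.
// Assumes `store` is the Vuex store with the root actions plus the
// namespaced `regions`, `vpcs` and `subnets` dropdown modules.
function selectRegionAndLoadVpcs(store, region) {
  store.dispatch('setRegion', { region }); // commits SET_REGION on the root module
  return store.dispatch('vpcs/fetchItems', { region }); // namespaced dropdown action
}

function selectVpcAndLoadSubnets(store, vpc) {
  store.dispatch('setVpc', { vpc });
  return store.dispatch('subnets/fetchItems', { vpc });
}
```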
@@ -17,4 +17,45 @@ export const fetchRegions = () =>
      .send();
  });

export const fetchVpcs = () =>
  new Promise((resolve, reject) => {
    const ec2 = new EC2();

    ec2
      .describeVpcs()
      .on('success', ({ data: { Vpcs: vpcs } }) => {
        const transformedVpcs = vpcs.map(({ VpcId: id }) => ({ id, name: id }));

        resolve(transformedVpcs);
      })
      .on('error', error => {
        reject(error);
      })
      .send();
  });

export const fetchSubnets = ({ vpc }) =>
  new Promise((resolve, reject) => {
    const ec2 = new EC2();

    ec2
      .describeSubnets({
        Filters: [
          {
            Name: 'vpc-id',
            Values: [vpc.id],
          },
        ],
      })
      .on('success', ({ data: { Subnets: subnets } }) => {
        const transformedSubnets = subnets.map(({ SubnetId: id }) => ({ id, name: id }));

        resolve(transformedSubnets);
      })
      .on('error', error => {
        reject(error);
      })
      .send();
  });

export default () => {};
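Both new service helpers wrap the event-based AWS SDK request objects (`.on('success')` / `.on('error')` / `.send()`) in a promise and normalise the response into `{ id, name }` items; `fetchSubnets` additionally filters by the selected VPC id. A hedged consumption sketch, assuming the EC2 client above is already configured with credentials and a region by the surrounding code:

```javascript
// Sketch only: consuming the promise-based helpers defined above.
fetchVpcs()
  .then(vpcs => {
    // `vpcs` is a list of { id, name } objects derived from each VpcId.
    const [firstVpc] = vpcs;
    return firstVpc ? fetchSubnets({ vpc: firstVpc }) : [];
  })
  .then(subnets => {
    // `subnets` mirrors the same shape, limited to the selected VPC.
    console.log(subnets);
  })
  .catch(error => {
    // Errors from either describeVpcs or describeSubnets end up here.
    console.error(error);
  });
```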
@@ -4,4 +4,12 @@ export const setRegion = ({ commit }, payload) => {
  commit(types.SET_REGION, payload);
};

export const setVpc = ({ commit }, payload) => {
  commit(types.SET_VPC, payload);
};

export const setSubnet = ({ commit }, payload) => {
  commit(types.SET_SUBNET, payload);
};

export default () => {};
@@ -4,10 +4,10 @@ export default fetchItems => ({
  requestItems: ({ commit }) => commit(types.REQUEST_ITEMS),
  receiveItemsSuccess: ({ commit }, payload) => commit(types.RECEIVE_ITEMS_SUCCESS, payload),
  receiveItemsError: ({ commit }, payload) => commit(types.RECEIVE_ITEMS_ERROR, payload),
  fetchItems: ({ dispatch }) => {
  fetchItems: ({ dispatch }, payload) => {
    dispatch('requestItems');

    return fetchItems()
    return fetchItems(payload)
      .then(items => dispatch('receiveItemsSuccess', { items }))
      .catch(error => dispatch('receiveItemsError', { error }));
  },
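The behavioural change in these shared dropdown actions is that the dispatch payload is now forwarded to the injected `fetchItems` service, so each namespaced module can pass request parameters through. A small sketch of the resulting call path (module names follow the store registration below; the literal VPC id is made up):

```javascript
// Sketch only: the payload given to the namespaced action is handed to the
// injected service, e.g. fetchSubnets({ vpc }) for the subnets module.
store.dispatch('subnets/fetchItems', { vpc: { id: 'vpc-0123456789abcdef0' } });
```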
@@ -19,6 +19,14 @@ const createStore = () =>
        namespaced: true,
        ...clusterDropdownStore(awsServices.fetchRegions),
      },
      vpcs: {
        namespaced: true,
        ...clusterDropdownStore(awsServices.fetchVpcs),
      },
      subnets: {
        namespaced: true,
        ...clusterDropdownStore(awsServices.fetchSubnets),
      },
    },
  });
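Each dropdown gets its own namespaced module built by the shared `clusterDropdownStore` factory, differing only in the injected fetch function. A hypothetical sketch of extending the pattern to another AWS resource (the `fetchSecurityGroups` service and the import paths are assumptions for illustration, not part of this commit):

```javascript
// Sketch only: registering one more dropdown module with the same factory.
import clusterDropdownStore from './cluster_dropdown'; // path assumed
import * as awsServices from '../services/aws_services'; // path assumed

const modules = {
  securityGroups: {
    namespaced: true,
    // fetchSecurityGroups is hypothetical; it would wrap EC2#describeSecurityGroups
    // the same way fetchVpcs and fetchSubnets wrap their EC2 calls.
    ...clusterDropdownStore(awsServices.fetchSecurityGroups),
  },
};

export default modules;
```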
@@ -1,2 +1,3 @@
// eslint-disable-next-line import/prefer-default-export
export const SET_REGION = 'SET_REGION';
export const SET_VPC = 'SET_VPC';
export const SET_SUBNET = 'SET_SUBNET';
@@ -4,4 +4,10 @@ export default {
  [types.SET_REGION](state, { region }) {
    state.selectedRegion = region;
  },
  [types.SET_VPC](state, { vpc }) {
    state.selectedVpc = vpc;
  },
  [types.SET_SUBNET](state, { subnet }) {
    state.selectedSubnet = subnet;
  },
};
@@ -91,7 +91,7 @@ module UploadsActions

    upload_paths = uploader.upload_paths(params[:filename])
    upload = Upload.find_by(model: model, uploader: uploader_class.to_s, path: upload_paths)
    upload&.build_uploader
    upload&.retrieve_uploader
  end
  # rubocop: enable CodeReuse/ActiveRecord
@@ -86,3 +86,5 @@ module Ci
    end
  end
end

Ci::PipelineSchedule.prepend_if_ee('EE::Ci::PipelineSchedule')
@@ -45,3 +45,5 @@ module Ci
    end
  end
end

Ci::Trigger.prepend_if_ee('EE::Ci::Trigger')
@@ -64,3 +64,5 @@ module Clusters
    end
  end
end

Clusters::Concerns::ApplicationCore.prepend_if_ee('EE::Clusters::Concerns::ApplicationCore')
@@ -72,8 +72,15 @@ class CommitCollection
    end.compact]

    # Replace the commits, keeping the same order
    @commits = @commits.map do |c|
      replacements.fetch(c.id, c)
    @commits = @commits.map do |original_commit|
      # Return the original instance: if it didn't need to be batchloaded, it was
      # already enriched.
      batch_loaded_commit = replacements.fetch(original_commit.id, original_commit)

      # If batch loading the commit failed, fall back to the original commit.
      # We need to explicitly check `.nil?` since otherwise a `BatchLoader` instance
      # that looks like `nil` is returned.
      batch_loaded_commit.nil? ? original_commit : batch_loaded_commit
    end

    self
@@ -15,7 +15,7 @@ class Upload < ApplicationRecord
  scope :with_files_stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }

  before_save :calculate_checksum!, if: :foreground_checksummable?
  after_commit :schedule_checksum, if: :checksummable?
  after_commit :schedule_checksum, if: :needs_checksum?

  # as the FileUploader is not mounted, the default CarrierWave ActiveRecord
  # hooks are not executed and the file will not be deleted
@@ -53,20 +53,41 @@ class Upload < ApplicationRecord

  def calculate_checksum!
    self.checksum = nil
    return unless checksummable?
    return unless needs_checksum?

    self.checksum = Digest::SHA256.file(absolute_path).hexdigest
  end

  # Initialize the associated Uploader class with current model
  #
  # @param [String] mounted_as
  # @return [GitlabUploader] one of the subclasses, defined at the model's uploader attribute
  def build_uploader(mounted_as = nil)
    uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
      uploader.upload = self
    end
  end

  # Initialize the associated Uploader class with current model and
  # retrieve existing file from the store to a local cache
  #
  # @param [String] mounted_as
  # @return [GitlabUploader] one of the subclasses, defined at the model's uploader attribute
  def retrieve_uploader(mounted_as = nil)
    build_uploader(mounted_as).tap do |uploader|
      uploader.retrieve_from_store!(identifier)
    end
  end

  # This checks for existence of the upload on storage
  #
  # @return [Boolean] whether upload exists on storage
  def exist?
    exist = File.exist?(absolute_path)
    exist = if local?
              File.exist?(absolute_path)
            else
              retrieve_uploader.exists?
            end

    # Help sysadmins find missing upload files
    if persisted? && !exist
@@ -91,18 +112,24 @@ class Upload < ApplicationRecord
    store == ObjectStorage::Store::LOCAL
  end

  private

  def delete_file!
    build_uploader.remove!
  end

  def checksummable?
  # Returns whether generating checksum is needed
  #
  # This takes into account whether file exists, if any checksum exists
  # or if the storage has checksum generation code implemented
  #
  # @return [Boolean] whether generating a checksum is needed
  def needs_checksum?
    checksum.nil? && local? && exist?
  end

  private

  def delete_file!
    retrieve_uploader.remove!
  end

  def foreground_checksummable?
    checksummable? && size <= CHECKSUM_THRESHOLD
    needs_checksum? && size <= CHECKSUM_THRESHOLD
  end

  def schedule_checksum
@@ -19,7 +19,7 @@ class AvatarUploader < GitlabUploader
  end

  def absolute_path
    self.class.absolute_path(model.avatar.upload)
    self.class.absolute_path(upload)
  end

  private
@@ -99,6 +99,17 @@ class GitlabUploader < CarrierWave::Uploader::Base
    end
  end

  # Used to replace an existing upload with another +file+ without modifying stored metadata
  # Use this method only to repair/replace an existing upload, or to upload to a Geo secondary node
  #
  # @param [CarrierWave::SanitizedFile] file that will replace existing upload
  # @return CarrierWave::SanitizedFile
  def replace_file_without_saving!(file)
    raise ArgumentError, 'should be a CarrierWave::SanitizedFile' unless file.is_a? CarrierWave::SanitizedFile

    storage.store!(file)
  end

  private

  # Designed to be overridden by child uploaders that have a dynamic path
@@ -12,7 +12,7 @@ class ImportIssuesCsvWorker
    @project = Project.find(project_id)
    @upload = Upload.find(upload_id)

    importer = Issues::ImportCsvService.new(@user, @project, @upload.build_uploader)
    importer = Issues::ImportCsvService.new(@user, @project, @upload.retrieve_uploader)
    importer.execute

    @upload.destroy
@@ -22,7 +22,7 @@ module ObjectStorage

      def build_uploader(subject, mount_point)
        case subject
        when Upload then subject.build_uploader(mount_point)
        when Upload then subject.retrieve_uploader(mount_point)
        else
          subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
        end
@@ -119,7 +119,7 @@ module ObjectStorage
      end

      def build_uploaders(uploads)
        uploads.map { |upload| upload.build_uploader(@mounted_as) }
        uploads.map { |upload| upload.retrieve_uploader(@mounted_as) }
      end

      def migrate(uploads)
@@ -0,0 +1,5 @@
---
title: Add pull_mirror_branch_prefix column on projects table
merge_request: 17368
author:
type: added
@@ -0,0 +1,5 @@
---
title: Adds sorting of packages at the project level
merge_request: 15448
author:
type: added
@@ -0,0 +1,5 @@
---
title: Fix viewing merge request from a fork that's being deleted
merge_request: 17894
author:
type: fixed
@@ -0,0 +1,5 @@
---
title: Update Pages to v1.11.0
merge_request: 18010
author:
type: other
@@ -0,0 +1,16 @@
# frozen_string_literal: true

module Com
  module Gitlab
    module Patch
      module DrawRoute
        extend ::Gitlab::Utils::Override

        override :draw_com
        def draw_com(routes_name)
          draw_route(route_path("com/config/routes/#{routes_name}.rb"))
        end
      end
    end
  end
end
@@ -0,0 +1,25 @@
# frozen_string_literal: true

require 'fast_spec_helper'
require 'com_spec_helper'

describe Gitlab::Patch::DrawRoute do
  subject do
    Class.new do
      include Gitlab::Patch::DrawRoute

      def route_path(route_name)
        File.expand_path("../../../../../#{route_name}", __dir__)
      end
    end.new
  end

  before do
    allow(subject).to receive(:instance_eval)
  end

  it 'raises an error when nothing is drawn' do
    expect { subject.draw(:non_existing) }
      .to raise_error(described_class::RoutesNotFound)
  end
end
@@ -22,6 +22,7 @@ module Gitlab
    require_dependency Rails.root.join('lib/gitlab/current_settings')
    require_dependency Rails.root.join('lib/gitlab/middleware/read_only')
    require_dependency Rails.root.join('lib/gitlab/middleware/basic_health_check')
    require_dependency Rails.root.join('config/light_settings')

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
@@ -62,6 +63,15 @@ module Gitlab
      config.paths['app/views'].unshift "#{config.root}/ee/app/views"
    end

    if LightSettings.com?
      com_paths = config.eager_load_paths.each_with_object([]) do |path, memo|
        com_path = config.root.join('com', Pathname.new(path).relative_path_from(config.root))
        memo << com_path.to_s
      end

      config.eager_load_paths.push(*com_paths)
    end

    # Rake tasks ignore the eager loading settings, so we need to set the
    # autoload paths explicitly
    config.autoload_paths = config.eager_load_paths.dup
@@ -0,0 +1,26 @@
# frozen_string_literal: true

require 'active_support/inflector'

module InjectComModule
  def prepend_if_com(constant, with_descendants: false)
    return unless Gitlab.com?

    com_module = constant.constantize
    prepend(com_module)

    if with_descendants
      descendants.each { |descendant| descendant.prepend(com_module) }
    end
  end

  def extend_if_com(constant)
    extend(constant.constantize) if Gitlab.com?
  end

  def include_if_com(constant)
    include(constant.constantize) if Gitlab.com?
  end
end

Module.prepend(InjectComModule)
@@ -0,0 +1,32 @@
# frozen_string_literal: true

class LightSettings
  GL_HOST ||= 'gitlab.com'
  GL_SUBDOMAIN_REGEX ||= %r{\A[a-z0-9]+\.gitlab\.com\z}.freeze

  class << self
    def com?
      return Thread.current[:is_com] unless Thread.current[:is_com].nil?

      Thread.current[:is_com] = host == GL_HOST || gl_subdomain?
    end

    private

    def config
      YAML.safe_load(File.read(settings_path), aliases: true)[Rails.env]
    end

    def settings_path
      Rails.root.join('config', 'gitlab.yml')
    end

    def host
      config['gitlab']['host']
    end

    def gl_subdomain?
      GL_SUBDOMAIN_REGEX === host
    end
  end
end
@@ -0,0 +1,9 @@
# frozen_string_literal: true

class AddPullMirrorBranchPrefixToProjects < ActiveRecord::Migration[5.2]
  DOWNTIME = false

  def change
    add_column :projects, :pull_mirror_branch_prefix, :string, limit: 50
  end
end
@@ -2922,6 +2922,7 @@ ActiveRecord::Schema.define(version: 2019_09_27_074328) do
    t.boolean "emails_disabled"
    t.integer "max_pages_size"
    t.integer "max_artifacts_size"
    t.string "pull_mirror_branch_prefix", limit: 50
    t.index "lower((name)::text)", name: "index_projects_on_lower_name"
    t.index ["archived", "pending_delete", "merge_requests_require_code_owner_approval"], name: "projects_requiring_code_owner_approval", where: "((pending_delete = false) AND (archived = false) AND (merge_requests_require_code_owner_approval = true))"
    t.index ["created_at"], name: "index_projects_on_created_at"
@@ -1,6 +1,7 @@
# frozen_string_literal: true

require 'pathname'
require_relative '../config/light_settings'

module Gitlab
  def self.root
@@ -37,24 +38,18 @@ module Gitlab

  COM_URL = 'https://gitlab.com'
  APP_DIRS_PATTERN = %r{^/?(app|config|ee|lib|spec|\(\w*\))}.freeze
  SUBDOMAIN_REGEX = %r{\Ahttps://[a-z0-9]+\.gitlab\.com\z}.freeze
  VERSION = File.read(root.join("VERSION")).strip.freeze
  INSTALLATION_TYPE = File.read(root.join("INSTALLATION_TYPE")).strip.freeze
  HTTP_PROXY_ENV_VARS = %w(http_proxy https_proxy HTTP_PROXY HTTPS_PROXY).freeze

  def self.com?
    # Check `gl_subdomain?` as well to keep parity with gitlab.com
    Gitlab.config.gitlab.url == COM_URL || gl_subdomain?
    LightSettings.com?
  end

  def self.org?
    Gitlab.config.gitlab.url == 'https://dev.gitlab.org'
  end

  def self.gl_subdomain?
    SUBDOMAIN_REGEX === Gitlab.config.gitlab.url
  end

  def self.dev_env_org_or_com?
    dev_env_or_com? || org?
  end
@@ -79,6 +74,10 @@ module Gitlab
    yield if ee?
  end

  def self.com
    yield if com?
  end

  def self.http_proxy_env?
    HTTP_PROXY_ENV_VARS.any? { |name| ENV[name] }
  end
@@ -92,7 +92,7 @@ module Gitlab

      def legacy_file_uploader
        strong_memoize(:legacy_file_uploader) do
          uploader = upload.build_uploader
          uploader = upload.retrieve_uploader
          uploader.retrieve_from_store!(File.basename(upload.path))
          uploader
        end
@@ -139,6 +139,7 @@ excluded_attributes:
  - :mirror_trigger_builds
  - :only_mirror_protected_branches
  - :pull_mirror_available_overridden
  - :pull_mirror_branch_prefix
  - :mirror_overwrites_diverged_branches
  - :packages_enabled
  - :mirror_last_update_at
@@ -68,7 +68,7 @@ module Gitlab
          yield(@project.avatar)
        else
          project_uploads_except_avatar(avatar_path).find_each(batch_size: UPLOADS_BATCH_SIZE) do |upload|
            yield(upload.build_uploader)
            yield(upload.retrieve_uploader)
          end
        end
      end
@@ -6,11 +6,12 @@ module Gitlab
  module Patch
    module DrawRoute
      prepend_if_ee('EE::Gitlab::Patch::DrawRoute') # rubocop: disable Cop/InjectEnterpriseEditionModule
      prepend_if_com('Com::Gitlab::Patch::DrawRoute')

      RoutesNotFound = Class.new(StandardError)

      def draw(routes_name)
        drawn_any = draw_ce(routes_name) | draw_ee(routes_name)
        drawn_any = draw_ce(routes_name) | draw_ee(routes_name) | draw_com(routes_name)

        drawn_any || raise(RoutesNotFound.new("Cannot find #{routes_name}"))
      end
@@ -23,6 +24,10 @@ module Gitlab
        true
      end

      def draw_com(_)
        false
      end

      def route_path(routes_name)
        Rails.root.join(routes_name)
      end
@@ -68,7 +68,7 @@ module Gitlab
        }

        relation.find_each(find_params) do |upload|
          clean(upload.build_uploader, dry_run: dry_run)
          clean(upload.retrieve_uploader, dry_run: dry_run)
          sleep sleep_time if sleep_time
        rescue => err
          logger.error "failed to sanitize #{upload_ref(upload)}: #{err.message}"
@@ -32,7 +32,7 @@ module Gitlab
      end

      def remote_object_exists?(upload)
        upload.build_uploader.file.exists?
        upload.retrieve_uploader.file.exists?
      end
    end
  end
@@ -2421,6 +2421,9 @@ msgstr ""
msgid "Branch not loaded - %{branchId}"
msgstr ""

msgid "Branch prefix"
msgstr ""

msgid "BranchSwitcherPlaceholder|Search branches"
msgstr ""

@@ -3351,6 +3354,9 @@ msgstr ""
msgid "ClusterIntegration|Choose a prefix to be used for your namespaces. Defaults to your project path."
msgstr ""

msgid "ClusterIntegration|Choose the %{startLink}subnets%{endLink} in your VPC where your worker nodes will run."
msgstr ""

msgid "ClusterIntegration|Choose which applications to install on your Kubernetes cluster. Helm Tiller is required to install any of the following applications."
msgstr ""

@@ -3390,9 +3396,15 @@ msgstr ""
msgid "ClusterIntegration|Copy Service Token"
msgstr ""

msgid "ClusterIntegration|Could not load VPCs for the selected region"
msgstr ""

msgid "ClusterIntegration|Could not load regions from your AWS account"
msgstr ""

msgid "ClusterIntegration|Could not load subnets for the selected VPC"
msgstr ""

msgid "ClusterIntegration|Create Kubernetes cluster"
msgstr ""

@@ -3579,6 +3591,12 @@ msgstr ""
msgid "ClusterIntegration|Loading Regions"
msgstr ""

msgid "ClusterIntegration|Loading VPCs"
msgstr ""

msgid "ClusterIntegration|Loading subnets"
msgstr ""

msgid "ClusterIntegration|Machine type"
msgstr ""

@@ -3591,6 +3609,9 @@ msgstr ""
msgid "ClusterIntegration|No IAM Roles found"
msgstr ""

msgid "ClusterIntegration|No VPCs found"
msgstr ""

msgid "ClusterIntegration|No machine types matched your search"
msgstr ""

@@ -3603,6 +3624,9 @@ msgstr ""
msgid "ClusterIntegration|No region found"
msgstr ""

msgid "ClusterIntegration|No subnet found"
msgstr ""

msgid "ClusterIntegration|No zones matched your search"
msgstr ""

@@ -3672,6 +3696,9 @@ msgstr ""
msgid "ClusterIntegration|Search IAM Roles"
msgstr ""

msgid "ClusterIntegration|Search VPCs"
msgstr ""

msgid "ClusterIntegration|Search machine types"
msgstr ""

@@ -3681,12 +3708,24 @@ msgstr ""
msgid "ClusterIntegration|Search regions"
msgstr ""

msgid "ClusterIntegration|Search subnets"
msgstr ""

msgid "ClusterIntegration|Search zones"
msgstr ""

msgid "ClusterIntegration|See and edit the details for your Kubernetes cluster"
msgstr ""

msgid "ClusterIntegration|Select a VPC to choose a subnet"
msgstr ""

msgid "ClusterIntegration|Select a VPC to use for your EKS Cluster resources. To use a new VPC, first create one on %{startLink}Amazon Web Services%{endLink}."
msgstr ""

msgid "ClusterIntegration|Select a region to choose a VPC"
msgstr ""

msgid "ClusterIntegration|Select machine type"
msgstr ""

@@ -3735,6 +3774,9 @@ msgstr ""
msgid "ClusterIntegration|Specifying a domain will allow you to use Auto Review Apps and Auto Deploy stages for %{auto_devops_start}Auto DevOps%{auto_devops_end}. The domain should have a wildcard DNS configured matching the domain."
msgstr ""

msgid "ClusterIntegration|Subnet"
msgstr ""

msgid "ClusterIntegration|The Kubernetes certificate used to authenticate to the cluster."
msgstr ""

@@ -3783,6 +3825,9 @@ msgstr ""
msgid "ClusterIntegration|Uses the Cloud Run, Istio, and HTTP Load Balancing addons for this cluster."
msgstr ""

msgid "ClusterIntegration|VPC"
msgstr ""

msgid "ClusterIntegration|Validating project billing status"
msgstr ""

@@ -3831,9 +3876,15 @@ msgstr ""
msgid "ClusterIntegration|sign up"
msgstr ""

msgid "ClusterIntergation|Select a VPC"
msgstr ""

msgid "ClusterIntergation|Select a region"
msgstr ""

msgid "ClusterIntergation|Select a subnet"
msgstr ""

msgid "ClusterIntergation|Select role name"
msgstr ""

@@ -8640,6 +8691,9 @@ msgstr ""
msgid "Introducing Your Conversational Development Index"
msgstr ""

msgid "Invalid Git ref"
msgstr ""

msgid "Invalid Insights config file detected"
msgstr ""

@@ -10120,6 +10174,9 @@ msgstr ""
msgid "Mirror user"
msgstr ""

msgid "Mirrored branches will have this prefix. If you enabled 'Only mirror protected branches' you need to include this prefix on protected branches in this project or nothing will be mirrored."
msgstr ""

msgid "Mirrored repositories"
msgstr ""

@@ -14890,6 +14947,12 @@ msgstr ""
msgid "SortOptions|Start soon"
msgstr ""

msgid "SortOptions|Type"
msgstr ""

msgid "SortOptions|Version"
msgstr ""

msgid "SortOptions|Weight"
msgstr ""
@@ -0,0 +1,3 @@
# frozen_string_literal: true

Settings.gitlab[:url] = "https://test.gitlab.com"
@@ -2,7 +2,7 @@

FactoryBot.define do
  factory :upload do
    model { build(:project) }
    model { create(:project) }
    size { 100.kilobytes }
    uploader { "AvatarUploader" }
    mount_point { :avatar }
@@ -11,23 +11,27 @@ FactoryBot.define do

    # we should build a mount agnostic upload by default
    transient do
      filename { 'myfile.jpg' }
      filename { 'avatar.jpg' }
    end

    # this needs to comply with RecordsUpload::Concern#upload_path
    path { File.join("uploads/-/system", model.class.underscore, mount_point.to_s, 'avatar.jpg') }
    path do
      uploader_instance = Object.const_get(uploader.to_s).new(model, mount_point)
      File.join(uploader_instance.store_dir, filename)
    end

    trait :personal_snippet_upload do
      uploader { "PersonalFileUploader" }
      model { create(:personal_snippet) }
      path { File.join(secret, filename) }
      model { build(:personal_snippet) }
      uploader { "PersonalFileUploader" }
      secret { SecureRandom.hex }
      mount_point { nil }
    end

    trait :issuable_upload do
      uploader { "FileUploader" }
      path { File.join(secret, filename) }
      secret { SecureRandom.hex }
      mount_point { nil }
    end

    trait :with_file do
@@ -42,22 +46,23 @@ FactoryBot.define do
    end

    trait :namespace_upload do
      model { build(:group) }
      model { create(:group) }
      path { File.join(secret, filename) }
      uploader { "NamespaceFileUploader" }
      secret { SecureRandom.hex }
      mount_point { nil }
    end

    trait :favicon_upload do
      model { build(:appearance) }
      path { File.join(secret, filename) }
      model { create(:appearance) }
      uploader { "FaviconUploader" }
      secret { SecureRandom.hex }
      mount_point { :favicon }
    end

    trait :attachment_upload do
      mount_point { :attachment }
      model { build(:note) }
      model { create(:note) }
      uploader { "AttachmentUploader" }
    end
  end
@@ -5,10 +5,12 @@ ENV['IN_MEMORY_APPLICATION_SETTINGS'] = 'true'

require 'active_support/dependencies'
require_relative '../config/initializers/0_inject_enterprise_edition_module'
require_relative '../config/initializers/0_inject_com_module'
require_relative '../config/settings'
require_relative 'support/rspec'
require 'active_support/all'

ActiveSupport::Dependencies.autoload_paths << 'lib'
ActiveSupport::Dependencies.autoload_paths << 'ee/lib'
ActiveSupport::Dependencies.autoload_paths << 'com/lib'
ActiveSupport::XmlMini.backend = 'Nokogiri'
@@ -0,0 +1,31 @@
# frozen_string_literal: true
require 'spec_helper'

# This is a feature spec because the problems arise when rendering the view for
# an actual project for which the repository is removed but the cache is not
# updated.
# This can occur when the fork a merge request is created from is in the process
# of being destroyed.
describe 'User views merged merge request from deleted fork' do
  include ProjectForksHelper

  let(:project) { create(:project, :repository) }
  let(:source_project) { fork_project(project, nil, repository: true) }
  let(:user) { project.owner }
  let!(:merge_request) { create(:merge_request, :merged, source_project: source_project, target_project: project) }

  before do
    sign_in user

    fork_owner = source_project.namespace.owners.first
    # Place the source_project in the weird in between state
    source_project.update_attribute(:pending_delete, true)
    Projects::DestroyService.new(source_project, fork_owner, {}).__send__(:trash_repositories!)
  end

  it 'correctly shows the merge request' do
    visit(merge_request_path(merge_request))

    expect(page).to have_content(merge_request.title)
  end
end
@@ -1,39 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Commit pipeline status component when polling is not successful renders not found CI icon without loader 1`] = `
<div
  class="ci-status-link"
>
  <a>
    <ciicon-stub
      aria-label="Pipeline: not found"
      cssclasses=""
      data-container="body"
      data-original-title="Pipeline: not found"
      size="24"
      status="[object Object]"
      title=""
    />
  </a>
</div>
`;

exports[`Commit pipeline status component when polling is successful renders CI icon without loader 1`] = `
<div
  class="ci-status-link"
>
  <a
    href="/frontend-fixtures/pipelines-project/pipelines/47"
  >
    <ciicon-stub
      aria-label="Pipeline: pending"
      cssclasses=""
      data-container="body"
      data-original-title="Pipeline: pending"
      size="24"
      status="[object Object]"
      title=""
    />
  </a>
</div>
`;
@@ -2,6 +2,7 @@ import Visibility from 'visibilityjs';
import { GlLoadingIcon } from '@gitlab/ui';
import Poll from '~/lib/utils/poll';
import flash from '~/flash';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
import { shallowMount } from '@vue/test-utils';
import { getJSONFixture } from '../helpers/fixtures';
@@ -36,6 +37,10 @@ describe('Commit pipeline status component', () => {
    });
  };

  const findLoader = () => wrapper.find(GlLoadingIcon);
  const findLink = () => wrapper.find('a');
  const findCiIcon = () => findLink().find(CiIcon);

  afterEach(() => {
    wrapper.destroy();
    wrapper = null;
@@ -111,14 +116,14 @@ describe('Commit pipeline status component', () => {

    it('shows the loading icon at start', () => {
      createComponent();
      expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
      expect(findLoader().exists()).toBe(true);

      pollConfig.successCallback({
        data: { pipelines: [] },
      });

      return wrapper.vm.$nextTick().then(() => {
        expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
        expect(findLoader().exists()).toBe(false);
      });
    });
@@ -130,8 +135,17 @@ describe('Commit pipeline status component', () => {
      return wrapper.vm.$nextTick();
    });

    it('renders CI icon without loader', () => {
      expect(wrapper.element).toMatchSnapshot();
    it('does not render loader', () => {
      expect(findLoader().exists()).toBe(false);
    });

    it('renders link with href', () => {
      expect(findLink().attributes('href')).toEqual(mockCiStatus.details_path);
    });

    it('renders CI icon', () => {
      expect(findCiIcon().attributes('data-original-title')).toEqual('Pipeline: pending');
      expect(findCiIcon().props('status')).toEqual(mockCiStatus);
    });
  });
@@ -140,8 +154,21 @@ describe('Commit pipeline status component', () => {
      pollConfig.errorCallback();
    });

    it('renders not found CI icon without loader', () => {
      expect(wrapper.element).toMatchSnapshot();
    it('does not render loader', () => {
      expect(findLoader().exists()).toBe(false);
    });

    it('renders link with href', () => {
      expect(findLink().attributes('href')).toBeUndefined();
    });

    it('renders not found CI icon', () => {
      expect(findCiIcon().attributes('data-original-title')).toEqual('Pipeline: not found');
      expect(findCiIcon().props('status')).toEqual({
        text: 'not found',
        icon: 'status_notfound',
        group: 'notfound',
      });
    });

    it('displays flash error message', () => {
@@ -4,6 +4,7 @@ import Vue from 'vue';
import EksClusterConfigurationForm from '~/create_cluster/eks_cluster/components/eks_cluster_configuration_form.vue';
import RegionDropdown from '~/create_cluster/eks_cluster/components/region_dropdown.vue';

import eksClusterFormState from '~/create_cluster/eks_cluster/store/state';
import clusterDropdownStoreState from '~/create_cluster/eks_cluster/store/cluster_dropdown/state';

const localVue = createLocalVue();
@@ -12,29 +13,59 @@ localVue.use(Vuex);
describe('EksClusterConfigurationForm', () => {
  let store;
  let actions;
  let state;
  let regionsState;
  let vpcsState;
  let subnetsState;
  let vpcsActions;
  let regionsActions;
  let subnetsActions;
  let vm;

  beforeEach(() => {
    state = eksClusterFormState();
    actions = {
      setRegion: jest.fn(),
      setVpc: jest.fn(),
      setSubnet: jest.fn(),
    };
    regionsActions = {
      fetchItems: jest.fn(),
    };
    vpcsActions = {
      fetchItems: jest.fn(),
    };
    subnetsActions = {
      fetchItems: jest.fn(),
    };
    regionsState = {
      ...clusterDropdownStoreState(),
    };
    vpcsState = {
      ...clusterDropdownStoreState(),
    };
    subnetsState = {
      ...clusterDropdownStoreState(),
    };
    store = new Vuex.Store({
      state,
      actions,
      modules: {
        vpcs: {
          namespaced: true,
          state: vpcsState,
          actions: vpcsActions,
        },
        regions: {
          namespaced: true,
          state: regionsState,
          actions: regionsActions,
        },
        subnets: {
          namespaced: true,
          state: subnetsState,
          actions: subnetsActions,
        },
      },
    });
  });
@@ -51,6 +82,8 @@ describe('EksClusterConfigurationForm', () => {
  });

  const findRegionDropdown = () => vm.find(RegionDropdown);
  const findVpcDropdown = () => vm.find('[field-id="eks-vpc"]');
  const findSubnetDropdown = () => vm.find('[field-id="eks-subnet"]');

  describe('when mounted', () => {
    it('fetches available regions', () => {
@@ -62,16 +95,72 @@ describe('EksClusterConfigurationForm', () => {
      regionsState.isLoadingItems = true;

      return Vue.nextTick().then(() => {
        expect(findRegionDropdown().props('loading')).toEqual(regionsState.isLoadingItems);
        expect(findRegionDropdown().props('loading')).toBe(regionsState.isLoadingItems);
      });
    });

    it('sets regions to RegionDropdown regions property', () => {
      expect(findRegionDropdown().props('regions')).toEqual(regionsState.items);
      expect(findRegionDropdown().props('regions')).toBe(regionsState.items);
    });

    it('sets loadingRegionsError to RegionDropdown error property', () => {
      expect(findRegionDropdown().props('error')).toEqual(regionsState.loadingItemsError);
      expect(findRegionDropdown().props('error')).toBe(regionsState.loadingItemsError);
    });

    it('disables VpcDropdown when no region is selected', () => {
      expect(findVpcDropdown().props('disabled')).toBe(true);
    });

    it('enables VpcDropdown when a region is selected', () => {
      state.selectedRegion = { name: 'west-1 ' };

      return Vue.nextTick().then(() => {
        expect(findVpcDropdown().props('disabled')).toBe(false);
      });
    });

    it('sets isLoadingVpcs to VpcDropdown loading property', () => {
      vpcsState.isLoadingItems = true;

      return Vue.nextTick().then(() => {
        expect(findVpcDropdown().props('loading')).toBe(vpcsState.isLoadingItems);
      });
    });

    it('sets vpcs to VpcDropdown items property', () => {
      expect(findVpcDropdown().props('items')).toBe(vpcsState.items);
    });

    it('sets loadingVpcsError to VpcDropdown hasErrors property', () => {
      expect(findVpcDropdown().props('hasErrors')).toBe(vpcsState.loadingItemsError);
    });

    it('disables SubnetDropdown when no vpc is selected', () => {
      expect(findSubnetDropdown().props('disabled')).toBe(true);
    });

    it('enables SubnetDropdown when a vpc is selected', () => {
      state.selectedVpc = { name: 'vpc-1 ' };

      return Vue.nextTick().then(() => {
        expect(findSubnetDropdown().props('disabled')).toBe(false);
      });
    });

    it('sets isLoadingSubnets to SubnetDropdown loading property', () => {
      subnetsState.isLoadingItems = true;

      return Vue.nextTick().then(() => {
        expect(findSubnetDropdown().props('loading')).toBe(subnetsState.isLoadingItems);
      });
    });

    it('sets subnets to SubnetDropdown items property', () => {
      expect(findSubnetDropdown().props('items')).toBe(subnetsState.items);
    });

    it('sets loadingSubnetsError to SubnetDropdown hasErrors property', () => {
      expect(findSubnetDropdown().props('hasErrors')).toBe(subnetsState.loadingItemsError);
    });

  describe('when region is selected', () => {
@@ -84,5 +173,37 @@ describe('EksClusterConfigurationForm', () => {
    it('dispatches setRegion action', () => {
      expect(actions.setRegion).toHaveBeenCalledWith(expect.anything(), { region }, undefined);
    });

    it('fetches available vpcs', () => {
      expect(vpcsActions.fetchItems).toHaveBeenCalledWith(expect.anything(), { region }, undefined);
    });
  });

  describe('when vpc is selected', () => {
    const vpc = { name: 'vpc-1' };

    beforeEach(() => {
      findVpcDropdown().vm.$emit('input', vpc);
    });

    it('dispatches setVpc action', () => {
      expect(actions.setVpc).toHaveBeenCalledWith(expect.anything(), { vpc }, undefined);
    });

    it('dispatches fetchSubnets action', () => {
      expect(subnetsActions.fetchItems).toHaveBeenCalledWith(expect.anything(), { vpc }, undefined);
    });
  });

  describe('when a subnet is selected', () => {
    const subnet = { name: 'subnet-1' };

    beforeEach(() => {
      findSubnetDropdown().vm.$emit('input', subnet);
    });

    it('dispatches setSubnet action', () => {
      expect(actions.setSubnet).toHaveBeenCalledWith(expect.anything(), { subnet }, undefined);
    });
  });
});
@@ -1,17 +1,28 @@
import testAction from 'helpers/vuex_action_helper';

import createState from '~/create_cluster/eks_cluster/store/state';
import * as types from '~/create_cluster/eks_cluster/store/mutation_types';
import * as actions from '~/create_cluster/eks_cluster/store/actions';
import { SET_REGION, SET_VPC, SET_SUBNET } from '~/create_cluster/eks_cluster/store/mutation_types';

describe('EKS Cluster Store Actions', () => {
  describe('setRegion', () => {
    it(`commits ${types.SET_REGION} mutation`, () => {
      const region = { name: 'west-1' };
  let region;
  let vpc;
  let subnet;

      testAction(actions.setRegion, { region }, createState(), [
        { type: types.SET_REGION, payload: { region } },
      ]);
    });
  beforeEach(() => {
    region = { name: 'regions-1' };
    vpc = { name: 'vpc-1' };
    subnet = { name: 'subnet-1' };
  });

  it.each`
    action         | mutation      | payload       | payloadDescription
    ${'setRegion'} | ${SET_REGION} | ${{ region }} | ${'region'}
    ${'setVpc'}    | ${SET_VPC}    | ${{ vpc }}    | ${'vpc'}
    ${'setSubnet'} | ${SET_SUBNET} | ${{ subnet }} | ${'subnet'}
  `(`$action commits $mutation with $payloadDescription payload`, data => {
    const { action, mutation, payload } = data;

    testAction(actions[action], payload, createState(), [{ type: mutation, payload }]);
  });
});
@@ -1,19 +1,26 @@
import { SET_REGION } from '~/create_cluster/eks_cluster/store/mutation_types';
import { SET_REGION, SET_VPC, SET_SUBNET } from '~/create_cluster/eks_cluster/store/mutation_types';
import createState from '~/create_cluster/eks_cluster/store/state';
import mutations from '~/create_cluster/eks_cluster/store/mutations';

describe('Create EKS cluster store mutations', () => {
  let state;
  let region;
  let vpc;
  let subnet;

  beforeEach(() => {
    region = { name: 'regions-1' };
    vpc = { name: 'vpc-1' };
    subnet = { name: 'subnet-1' };

    state = createState();
  });

  it.each`
    mutation      | mutatedProperty     | payload       | expectedValue | expectedValueDescription
    ${SET_REGION} | ${'selectedRegion'} | ${{ region }} | ${region}     | ${'selected region payload'}
    ${SET_VPC}    | ${'selectedVpc'}    | ${{ vpc }}    | ${vpc}        | ${'selected vpc payload'}
    ${SET_SUBNET} | ${'selectedSubnet'} | ${{ subnet }} | ${subnet}     | ${'selected subnet payload'}
  `(`$mutation sets $mutatedProperty to $expectedValueDescription`, data => {
    const { mutation, mutatedProperty, payload, expectedValue } = data;
@@ -32,7 +32,7 @@ describe Gitlab::BackgroundMigration::LegacyUploadMover do

    if with_file
      upload = create(:upload, :with_file, :attachment_upload, params)
      model.update(attachment: upload.build_uploader)
      model.update(attachment: upload.retrieve_uploader)
      model.attachment.upload
    else
      create(:upload, :attachment_upload, params)
@@ -24,7 +24,7 @@ describe Gitlab::BackgroundMigration::LegacyUploadsMigrator do

    if with_file
      upload = create(:upload, :with_file, :attachment_upload, params)
      model.update(attachment: upload.build_uploader)
      model.update(attachment: upload.retrieve_uploader)
      model.attachment.upload
    else
      create(:upload, :attachment_upload, params)
@@ -83,7 +83,7 @@ describe Gitlab::ImportExport::UploadsManager do
      it 'restores the file' do
        manager.restore

        expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt')
        expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('dummy.txt')
      end
    end
  end
@@ -27,7 +27,7 @@ describe Gitlab::ImportExport::UploadsRestorer do
      it 'copies the uploads to the project path' do
        subject.restore

        expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt')
        expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('dummy.txt')
      end
    end
@@ -43,7 +43,7 @@ describe Gitlab::ImportExport::UploadsRestorer do
      it 'copies the uploads to the project path' do
        subject.restore

        expect(project.uploads.map { |u| u.build_uploader.filename }).to include('dummy.txt')
        expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('dummy.txt')
      end
    end
  end
@@ -58,7 +58,7 @@ describe Gitlab::Sanitizers::Exif do
  end

  describe '#clean' do
    let(:uploader) { create(:upload, :with_file, :issuable_upload).build_uploader }
    let(:uploader) { create(:upload, :with_file, :issuable_upload).retrieve_uploader }

    context "no dry run" do
      it "removes exif from the image" do
@@ -71,26 +71,30 @@ describe Gitlab do
  end

  describe '.com?' do
    before do
      Thread.current[:is_com] = nil
    end

    it 'is true when on GitLab.com' do
      stub_config_setting(url: 'https://gitlab.com')
      allow(LightSettings).to receive(:host).and_return('gitlab.com')

      expect(described_class.com?).to eq true
    end

    it 'is true when on staging' do
      stub_config_setting(url: 'https://staging.gitlab.com')
      allow(LightSettings).to receive(:host).and_return('staging.gitlab.com')

      expect(described_class.com?).to eq true
    end

    it 'is true when on other gitlab subdomain' do
      stub_config_setting(url: 'https://example.gitlab.com')
      allow(LightSettings).to receive(:host).and_return('example.gitlab.com')

      expect(described_class.com?).to eq true
    end

    it 'is false when not on GitLab.com' do
      stub_config_setting(url: 'http://example.com')
      allow(LightSettings).to receive(:host).and_return('example.com')

      expect(described_class.com?).to eq false
    end
@@ -149,6 +149,17 @@ describe CommitCollection do

        collection.enrich!
      end

      it 'returns the original commit if the commit could not be lazy loaded' do
        collection = described_class.new(project, [hash_commit])
        unexisting_lazy_commit = Commit.lazy(project, Gitlab::Git::BLANK_SHA)

        expect(Commit).to receive(:lazy).with(project, hash_commit.id).and_return(unexisting_lazy_commit)

        collection.enrich!

        expect(collection.commits).to contain_exactly(hash_commit)
      end
    end
  end
@@ -3,7 +3,7 @@
require 'spec_helper'

describe Upload do
  describe 'assocations' do
  describe 'associations' do
    it { is_expected.to belong_to(:model) }
  end
@@ -107,6 +107,52 @@ describe Upload do
    end
  end

  describe '#build_uploader' do
    it 'returns a uploader object with current upload associated with it' do
      subject = build(:upload)
      uploader = subject.build_uploader

      expect(uploader.upload).to eq(subject)
      expect(uploader.mounted_as).to eq(subject.send(:mount_point))
      expect(uploader.file).to be_nil
    end
  end

  describe '#retrieve_uploader' do
    it 'returns a uploader object with current uploader associated with and cache retrieved' do
      subject = build(:upload)
      uploader = subject.retrieve_uploader

      expect(uploader.upload).to eq(subject)
      expect(uploader.mounted_as).to eq(subject.send(:mount_point))
      expect(uploader.file).not_to be_nil
    end
  end

  describe '#needs_checksum?' do
    context 'with local storage' do
      it 'returns true when no checksum exists' do
        subject = create(:upload, :with_file, checksum: nil)

        expect(subject.needs_checksum?).to be_truthy
      end

      it 'returns false when checksum is already present' do
        subject = create(:upload, :with_file, checksum: 'something')

        expect(subject.needs_checksum?).to be_falsey
      end
    end

    context 'with remote storage' do
      subject { build(:upload, :object_storage) }

      it 'returns false' do
        expect(subject.needs_checksum?).to be_falsey
      end
    end
  end

  describe '#exist?' do
    it 'returns true when the file exists' do
      upload = described_class.new(path: __FILE__, store: ObjectStorage::Store::LOCAL)
@@ -44,7 +44,7 @@ describe Uploads::Fog do
      subject { data_store.delete_keys(keys) }

      before do
        uploads.each { |upload| upload.build_uploader.migrate!(2) }
        uploads.each { |upload| upload.retrieve_uploader.migrate!(2) }
      end

      it 'deletes multiple data' do
@@ -104,7 +104,7 @@ shared_examples 'handle uploads' do

    context "when neither the uploader nor the model exists" do
      before do
        allow_any_instance_of(Upload).to receive(:build_uploader).and_return(nil)
        allow_any_instance_of(Upload).to receive(:retrieve_uploader).and_return(nil)
        allow(controller).to receive(:find_model).and_return(nil)
      end
@@ -41,7 +41,8 @@ shared_examples_for 'model with uploads' do |supports_fileuploads|
      end

      it 'deletes remote files' do
        expect_any_instance_of(Uploads::Fog).to receive(:delete_keys).with(uploads.map(&:path))
        expected_array = array_including(*uploads.map(&:path))
        expect_any_instance_of(Uploads::Fog).to receive(:delete_keys).with(expected_array)

        model_object.destroy
      end
@@ -3,7 +3,7 @@ require 'spec_helper'
describe FileUploader do
  let(:group) { create(:group, name: 'awesome') }
  let(:project) { create(:project, :legacy_storage, namespace: group, name: 'project') }
  let(:uploader) { described_class.new(project) }
  let(:uploader) { described_class.new(project, :avatar) }
  let(:upload) { double(model: project, path: 'secret/foo.jpg') }

  subject { uploader }
@@ -184,6 +184,14 @@ describe FileUploader do
    end
  end

  describe '#replace_file_without_saving!' do
    let(:replacement) { Tempfile.create('replacement.jpg') }

    it 'replaces an existing file without changing its metadata' do
      expect { subject.replace_file_without_saving! CarrierWave::SanitizedFile.new(replacement) }.not_to change { subject.upload }
    end
  end

  context 'when remote file is used' do
    let(:temp_file) { Tempfile.new("test") }
|||
|
|
@ -69,6 +69,16 @@ describe GitlabUploader do
|
|||
end
|
||||
end
|
||||
|
||||
describe '#replace_file_without_saving!' do
|
||||
it 'allows file to be replaced without triggering any callbacks' do
|
||||
new_file = CarrierWave::SanitizedFile.new(Tempfile.new)
|
||||
|
||||
expect(subject).not_to receive(:with_callbacks)
|
||||
|
||||
subject.replace_file_without_saving!(new_file)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#open' do
|
||||
context 'when trace is stored in File storage' do
|
||||
context 'when file exists' do
|
||||
|
|
|
|||
|
|
@ -42,33 +42,23 @@ describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
|
|||
end
|
||||
|
||||
describe '.sanity_check!' do
|
||||
shared_examples 'raises a SanityCheckError' do
|
||||
shared_examples 'raises a SanityCheckError' do |expected_message|
|
||||
let(:mount_point) { nil }
|
||||
|
||||
it do
|
||||
expect { described_class.sanity_check!(uploads, model_class, mount_point) }
|
||||
.to raise_error(described_class::SanityCheckError)
|
||||
.to raise_error(described_class::SanityCheckError).with_message(expected_message)
|
||||
end
|
||||
end
|
||||
|
||||
before do
|
||||
stub_const("WrongModel", Class.new)
|
||||
end
|
||||
|
||||
context 'uploader types mismatch' do
|
||||
let!(:outlier) { create(:upload, uploader: 'GitlabUploader') }
|
||||
|
||||
include_examples 'raises a SanityCheckError'
|
||||
end
|
||||
|
||||
context 'model types mismatch' do
|
||||
let!(:outlier) { create(:upload, model_type: 'WrongModel') }
|
||||
|
||||
include_examples 'raises a SanityCheckError'
|
||||
include_examples 'raises a SanityCheckError', /Multiple uploaders found/
|
||||
end
|
||||
|
||||
context 'mount point not found' do
|
||||
include_examples 'raises a SanityCheckError' do
|
||||
include_examples 'raises a SanityCheckError', /Mount point [a-z:]+ not found in/ do
|
||||
let(:mount_point) { :potato }
|
||||
end
|
||||
end
|
||||
|
|
|
|||