From b0139a824fba85e5b71e69f2c99d423700ff76cc Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Thu, 30 Jun 2022 12:09:03 +0000 Subject: [PATCH] Add latest changes from gitlab-org/gitlab@master --- .../clusters_list/components/agent_table.vue | 4 +- .../javascripts/clusters_list/constants.js | 6 +- .../cloudsql/create_instance_form.vue | 132 +++++++++++ .../components/cloudsql/instance_table.vue | 75 ++++++ .../components/databases/service_table.vue | 221 ++++++++++++++++++ .../work_items/components/item_title.vue | 9 +- .../components/work_item_detail.vue | 9 +- .../components/work_item_weight.vue | 130 ++++++++++- .../local_update_work_item.mutation.graphql | 2 +- .../work_items/graphql/provider.js | 23 +- .../work_items/graphql/typedefs.graphql | 5 +- app/assets/javascripts/work_items/index.js | 1 + .../stylesheets/_page_specific_files.scss | 1 - app/assets/stylesheets/framework/icons.scss | 2 +- .../stylesheets/page_bundles/work_items.scss | 15 ++ app/assets/stylesheets/pages/work_items.scss | 4 - app/controllers/projects/logs_controller.rb | 103 -------- app/models/project_export_job.rb | 1 + .../projects/import_export/relation_export.rb | 22 ++ .../import_export/relation_export_upload.rb | 19 ++ app/serializers/environment_entity.rb | 21 -- app/services/pod_logs/base_service.rb | 91 -------- .../pod_logs/elasticsearch_service.rb | 98 -------- app/services/pod_logs/kubernetes_service.rb | 151 ------------ app/views/projects/issues/show.html.haml | 1 + app/views/projects/work_items/index.html.haml | 1 + config/application.rb | 1 + db/docs/project_relation_export_uploads.yml | 9 + db/docs/project_relation_exports.yml | 9 + ...9182308_create_project_relation_exports.rb | 20 ++ ..._create_project_relation_export_uploads.rb | 15 ++ db/schema_migrations/20220619182308 | 1 + db/schema_migrations/20220619184931 | 1 + db/structure.sql | 63 +++++ doc/development/event_store.md | 21 ++ doc/update/index.md | 2 +- .../project/integrations/webhook_events.md | 5 + 
lib/gitlab/database/gitlab_schemas.yml | 2 + lib/gitlab/diff/file.rb | 2 +- lib/gitlab/elasticsearch/logs/lines.rb | 157 ------------- lib/gitlab/elasticsearch/logs/pods.rb | 70 ------ locale/gitlab.pot | 195 ++++++++++------ .../projects/import_export/export_relation.rb | 11 + .../gitlab/import_export/labels.tar.gz | Bin 0 -> 768 bytes .../components/agent_table_spec.js | 8 +- .../cloudsql/create_instance_form_spec.js | 103 ++++++++ .../cloudsql/instance_table_spec.js | 65 ++++++ .../databases/service_table_spec.js | 44 ++++ .../components/app_spec.js | 77 +++--- .../work_items/components/item_title_spec.js | 2 - .../components/work_item_weight_spec.js | 152 ++++++++++-- spec/lib/gitlab/diff/file_spec.rb | 8 + spec/models/project_export_job_spec.rb | 13 +- .../import_export/relation_export_spec.rb | 23 ++ .../relation_export_upload_spec.rb | 25 ++ spec/support/matchers/event_store.rb | 11 +- 56 files changed, 1377 insertions(+), 885 deletions(-) create mode 100644 app/assets/javascripts/google_cloud/components/cloudsql/create_instance_form.vue create mode 100644 app/assets/javascripts/google_cloud/components/cloudsql/instance_table.vue create mode 100644 app/assets/javascripts/google_cloud/components/databases/service_table.vue create mode 100644 app/assets/stylesheets/page_bundles/work_items.scss delete mode 100644 app/assets/stylesheets/pages/work_items.scss delete mode 100644 app/controllers/projects/logs_controller.rb create mode 100644 app/models/projects/import_export/relation_export.rb create mode 100644 app/models/projects/import_export/relation_export_upload.rb delete mode 100644 app/services/pod_logs/base_service.rb delete mode 100644 app/services/pod_logs/elasticsearch_service.rb delete mode 100644 app/services/pod_logs/kubernetes_service.rb create mode 100644 db/docs/project_relation_export_uploads.yml create mode 100644 db/docs/project_relation_exports.yml create mode 100644 db/migrate/20220619182308_create_project_relation_exports.rb create mode 
100644 db/migrate/20220619184931_create_project_relation_export_uploads.rb create mode 100644 db/schema_migrations/20220619182308 create mode 100644 db/schema_migrations/20220619184931 delete mode 100644 lib/gitlab/elasticsearch/logs/lines.rb delete mode 100644 lib/gitlab/elasticsearch/logs/pods.rb create mode 100644 spec/factories/projects/import_export/export_relation.rb create mode 100644 spec/fixtures/gitlab/import_export/labels.tar.gz create mode 100644 spec/frontend/google_cloud/components/cloudsql/create_instance_form_spec.js create mode 100644 spec/frontend/google_cloud/components/cloudsql/instance_table_spec.js create mode 100644 spec/frontend/google_cloud/components/databases/service_table_spec.js create mode 100644 spec/models/projects/import_export/relation_export_spec.rb create mode 100644 spec/models/projects/import_export/relation_export_upload_spec.rb diff --git a/app/assets/javascripts/clusters_list/components/agent_table.vue b/app/assets/javascripts/clusters_list/components/agent_table.vue index 496baf8cb08..e0e3b961c51 100644 --- a/app/assets/javascripts/clusters_list/components/agent_table.vue +++ b/app/assets/javascripts/clusters_list/components/agent_table.vue @@ -58,7 +58,7 @@ export default { }, computed: { fields() { - const tdClass = 'gl-py-5!'; + const tdClass = 'gl-pt-3! gl-pb-4! 
gl-vertical-align-middle!'; return [ { key: 'name', @@ -184,7 +184,7 @@ export default { data-testid="cluster-agent-connection-status" > - {{ $options.AGENT_STATUSES[item.status].name }} diff --git a/app/assets/javascripts/clusters_list/constants.js b/app/assets/javascripts/clusters_list/constants.js index 10e71513065..7bc8a1a7304 100644 --- a/app/assets/javascripts/clusters_list/constants.js +++ b/app/assets/javascripts/clusters_list/constants.js @@ -145,8 +145,8 @@ export const AGENT_STATUSES = { }, inactive: { name: s__('ClusterAgents|Not connected'), - icon: 'severity-critical', - class: 'text-danger-800', + icon: 'status-alert', + class: 'text-danger-500', tooltip: { title: s__('ClusterAgents|Agent might not be connected to GitLab'), body: sprintf( @@ -159,7 +159,7 @@ export const AGENT_STATUSES = { unused: { name: s__('ClusterAgents|Never connected'), icon: 'status-neutral', - class: 'text-secondary-400', + class: 'text-secondary-500', tooltip: { title: s__('ClusterAgents|Agent never connected to GitLab'), body: s__('ClusterAgents|Make sure you are using a valid token.'), diff --git a/app/assets/javascripts/google_cloud/components/cloudsql/create_instance_form.vue b/app/assets/javascripts/google_cloud/components/cloudsql/create_instance_form.vue new file mode 100644 index 00000000000..0ac561b6132 --- /dev/null +++ b/app/assets/javascripts/google_cloud/components/cloudsql/create_instance_form.vue @@ -0,0 +1,132 @@ + + diff --git a/app/assets/javascripts/google_cloud/components/cloudsql/instance_table.vue b/app/assets/javascripts/google_cloud/components/cloudsql/instance_table.vue new file mode 100644 index 00000000000..823895214df --- /dev/null +++ b/app/assets/javascripts/google_cloud/components/cloudsql/instance_table.vue @@ -0,0 +1,75 @@ + + + diff --git a/app/assets/javascripts/google_cloud/components/databases/service_table.vue b/app/assets/javascripts/google_cloud/components/databases/service_table.vue new file mode 100644 index 00000000000..80bd6ef28fb 
--- /dev/null +++ b/app/assets/javascripts/google_cloud/components/databases/service_table.vue @@ -0,0 +1,221 @@ + + + diff --git a/app/assets/javascripts/work_items/components/item_title.vue b/app/assets/javascripts/work_items/components/item_title.vue index 19fbad4eaa3..1cdc9c28f05 100644 --- a/app/assets/javascripts/work_items/components/item_title.vue +++ b/app/assets/javascripts/work_items/components/item_title.vue @@ -1,5 +1,4 @@ diff --git a/app/assets/javascripts/work_items/graphql/local_update_work_item.mutation.graphql b/app/assets/javascripts/work_items/graphql/local_update_work_item.mutation.graphql index 0d31ecef6f8..43c92cf89ec 100644 --- a/app/assets/javascripts/work_items/graphql/local_update_work_item.mutation.graphql +++ b/app/assets/javascripts/work_items/graphql/local_update_work_item.mutation.graphql @@ -1,6 +1,6 @@ #import "./work_item.fragment.graphql" -mutation localUpdateWorkItem($input: LocalWorkItemAssigneesInput) { +mutation localUpdateWorkItem($input: LocalUpdateWorkItemInput) { localUpdateWorkItem(input: $input) @client { workItem { ...WorkItem diff --git a/app/assets/javascripts/work_items/graphql/provider.js b/app/assets/javascripts/work_items/graphql/provider.js index 9266b4cdccb..80d8c98e75d 100644 --- a/app/assets/javascripts/work_items/graphql/provider.js +++ b/app/assets/javascripts/work_items/graphql/provider.js @@ -2,7 +2,7 @@ import produce from 'immer'; import Vue from 'vue'; import VueApollo from 'vue-apollo'; import createDefaultClient from '~/lib/graphql'; -import { WIDGET_TYPE_ASSIGNEE } from '../constants'; +import { WIDGET_TYPE_ASSIGNEE, WIDGET_TYPE_WEIGHT } from '../constants'; import typeDefs from './typedefs.graphql'; import workItemQuery from './work_item.query.graphql'; @@ -10,7 +10,7 @@ export const temporaryConfig = { typeDefs, cacheConfig: { possibleTypes: { - LocalWorkItemWidget: ['LocalWorkItemAssignees'], + LocalWorkItemWidget: ['LocalWorkItemAssignees', 'LocalWorkItemWeight'], }, typePolicies: { WorkItem: { 
@@ -46,7 +46,7 @@ export const temporaryConfig = { { __typename: 'LocalWorkItemWeight', type: 'WEIGHT', - weight: 0, + weight: null, }, ] ); @@ -67,10 +67,19 @@ export const resolvers = { }); const data = produce(sourceData, (draftData) => { - const assigneesWidget = draftData.workItem.mockWidgets.find( - (widget) => widget.type === WIDGET_TYPE_ASSIGNEE, - ); - assigneesWidget.nodes = [...input.assignees]; + if (input.assignees) { + const assigneesWidget = draftData.workItem.mockWidgets.find( + (widget) => widget.type === WIDGET_TYPE_ASSIGNEE, + ); + assigneesWidget.nodes = [...input.assignees]; + } + + if (input.weight != null) { + const weightWidget = draftData.workItem.mockWidgets.find( + (widget) => widget.type === WIDGET_TYPE_WEIGHT, + ); + weightWidget.weight = input.weight; + } }); cache.writeQuery({ diff --git a/app/assets/javascripts/work_items/graphql/typedefs.graphql b/app/assets/javascripts/work_items/graphql/typedefs.graphql index de4bdad5659..71ac263a02e 100644 --- a/app/assets/javascripts/work_items/graphql/typedefs.graphql +++ b/app/assets/javascripts/work_items/graphql/typedefs.graphql @@ -21,9 +21,10 @@ extend type WorkItem { mockWidgets: [LocalWorkItemWidget] } -type LocalWorkItemAssigneesInput { +input LocalUpdateWorkItemInput { id: WorkItemID! assignees: [UserCore!] 
+ weight: Int } type LocalWorkItemPayload { @@ -32,5 +33,5 @@ type LocalWorkItemPayload { } extend type Mutation { - localUpdateWorkItem(input: LocalWorkItemAssigneesInput!): LocalWorkItemPayload + localUpdateWorkItem(input: LocalUpdateWorkItemInput!): LocalWorkItemPayload } diff --git a/app/assets/javascripts/work_items/index.js b/app/assets/javascripts/work_items/index.js index 33e28831b54..6437df597b4 100644 --- a/app/assets/javascripts/work_items/index.js +++ b/app/assets/javascripts/work_items/index.js @@ -10,6 +10,7 @@ export const initWorkItemsRoot = () => { return new Vue({ el, + name: 'WorkItemsRoot', router: createRouter(el.dataset.fullPath), apolloProvider: createApolloProvider(), provide: { diff --git a/app/assets/stylesheets/_page_specific_files.scss b/app/assets/stylesheets/_page_specific_files.scss index cf4a415446e..be72ec33465 100644 --- a/app/assets/stylesheets/_page_specific_files.scss +++ b/app/assets/stylesheets/_page_specific_files.scss @@ -32,4 +32,3 @@ @import './pages/storage_quota'; @import './pages/tree'; @import './pages/users'; -@import './pages/work_items'; diff --git a/app/assets/stylesheets/framework/icons.scss b/app/assets/stylesheets/framework/icons.scss index 2b07cc2bed1..c04d5297316 100644 --- a/app/assets/stylesheets/framework/icons.scss +++ b/app/assets/stylesheets/framework/icons.scss @@ -67,7 +67,7 @@ } .user-avatar-link { - display: flow-root; + display: inline-block; text-decoration: none; } diff --git a/app/assets/stylesheets/page_bundles/work_items.scss b/app/assets/stylesheets/page_bundles/work_items.scss new file mode 100644 index 00000000000..af019fb091b --- /dev/null +++ b/app/assets/stylesheets/page_bundles/work_items.scss @@ -0,0 +1,15 @@ +@import 'mixins_and_variables_and_functions'; + +.gl-token-selector-token-container { + display: flex; + align-items: center; +} + +#weight-widget-input:not(:hover, :focus), +#weight-widget-input[readonly] { + box-shadow: inset 0 0 0 $gl-border-size-1 var(--white, $white); +} + 
+#weight-widget-input[readonly] { + background-color: var(--white, $white); +} diff --git a/app/assets/stylesheets/pages/work_items.scss b/app/assets/stylesheets/pages/work_items.scss deleted file mode 100644 index b98f55df1ed..00000000000 --- a/app/assets/stylesheets/pages/work_items.scss +++ /dev/null @@ -1,4 +0,0 @@ -.gl-token-selector-token-container { - display: flex; - align-items: center; -} diff --git a/app/controllers/projects/logs_controller.rb b/app/controllers/projects/logs_controller.rb deleted file mode 100644 index 0f751db2064..00000000000 --- a/app/controllers/projects/logs_controller.rb +++ /dev/null @@ -1,103 +0,0 @@ -# frozen_string_literal: true - -module Projects - class LogsController < Projects::ApplicationController - include ::Gitlab::Utils::StrongMemoize - - before_action :authorize_read_pod_logs! - before_action :ensure_deployments, only: %i(k8s elasticsearch) - - feature_category :logging - urgency :low - - def index - return render_404 unless Feature.enabled?(:monitor_logging, project) - - if environment || cluster - render :index - else - render :empty_logs - end - end - - def k8s - render_logs(::PodLogs::KubernetesService, k8s_params) - end - - def elasticsearch - render_logs(::PodLogs::ElasticsearchService, elasticsearch_params) - end - - private - - def render_logs(service, permitted_params) - ::Gitlab::PollingInterval.set_header(response, interval: 3_000) - - result = service.new(cluster, namespace, params: permitted_params).execute - - if result.nil? 
- head :accepted - elsif result[:status] == :success - render json: result - else - render status: :bad_request, json: result - end - end - - # cluster is selected either via environment or directly by id - def cluster_params - params.permit(:environment_name, :cluster_id) - end - - def k8s_params - params.permit(:container_name, :pod_name) - end - - def elasticsearch_params - params.permit(:container_name, :pod_name, :search, :start_time, :end_time, :cursor) - end - - def environment - strong_memoize(:environment) do - if cluster_params.key?(:environment_name) - ::Environments::EnvironmentsFinder.new(project, current_user, name: cluster_params[:environment_name]).execute.first - else - project.default_environment - end - end - end - - def cluster - strong_memoize(:cluster) do - if gitlab_managed_apps_logs? - clusters = ClusterAncestorsFinder.new(project, current_user).execute - clusters.find { |cluster| cluster.id == cluster_params[:cluster_id].to_i } - else - environment&.deployment_platform&.cluster - end - end - end - - def namespace - if gitlab_managed_apps_logs? - Gitlab::Kubernetes::Helm::NAMESPACE - else - environment.deployment_namespace - end - end - - def ensure_deployments - return if gitlab_managed_apps_logs? - return if cluster && namespace.present? - - render status: :bad_request, json: { - status: :error, - message: _('Environment does not have deployments') - } - end - - def gitlab_managed_apps_logs? 
- cluster_params.key?(:cluster_id) - end - end -end diff --git a/app/models/project_export_job.rb b/app/models/project_export_job.rb index c7fe3d7bc10..decc71ee193 100644 --- a/app/models/project_export_job.rb +++ b/app/models/project_export_job.rb @@ -2,6 +2,7 @@ class ProjectExportJob < ApplicationRecord belongs_to :project + has_many :relation_exports, class_name: 'Projects::ImportExport::RelationExport' validates :project, :jid, :status, presence: true diff --git a/app/models/projects/import_export/relation_export.rb b/app/models/projects/import_export/relation_export.rb new file mode 100644 index 00000000000..0a31e525ac2 --- /dev/null +++ b/app/models/projects/import_export/relation_export.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module Projects + module ImportExport + class RelationExport < ApplicationRecord + self.table_name = 'project_relation_exports' + + belongs_to :project_export_job + + has_one :upload, + class_name: 'Projects::ImportExport::RelationExportUpload', + foreign_key: :project_relation_export_id, + inverse_of: :relation_export + + validates :export_error, length: { maximum: 300 } + validates :jid, length: { maximum: 255 } + validates :project_export_job, presence: true + validates :relation, presence: true, length: { maximum: 255 }, uniqueness: { scope: :project_export_job_id } + validates :status, numericality: { only_integer: true }, presence: true + end + end +end diff --git a/app/models/projects/import_export/relation_export_upload.rb b/app/models/projects/import_export/relation_export_upload.rb new file mode 100644 index 00000000000..965dc39d19f --- /dev/null +++ b/app/models/projects/import_export/relation_export_upload.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Projects + module ImportExport + class RelationExportUpload < ApplicationRecord + include WithUploads + include ObjectStorage::BackgroundMove + + self.table_name = 'project_relation_export_uploads' + + belongs_to :relation_export, + class_name: 
'Projects::ImportExport::RelationExport', + foreign_key: :project_relation_export_id, + inverse_of: :upload + + mount_uploader :export_file, ImportExportUploader + end + end +end diff --git a/app/serializers/environment_entity.rb b/app/serializers/environment_entity.rb index ac99463bd64..3473b4aebc8 100644 --- a/app/serializers/environment_entity.rb +++ b/app/serializers/environment_entity.rb @@ -66,22 +66,6 @@ class EnvironmentEntity < Grape::Entity environment.available? && can?(current_user, :stop_environment, environment) end - expose :logs_path, if: -> (*) { can_read_pod_logs? } do |environment| - project_logs_path(environment.project, environment_name: environment.name) - end - - expose :logs_api_path, if: -> (*) { can_read_pod_logs? } do |environment| - if environment.elastic_stack_available? - elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json) - else - k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json) - end - end - - expose :enable_advanced_logs_querying, if: -> (*) { can_read_pod_logs? } do |environment| - environment.elastic_stack_available? - end - expose :can_delete do |environment| can?(current_user, :destroy_environment, environment) end @@ -102,11 +86,6 @@ class EnvironmentEntity < Grape::Entity can?(current_user, :update_environment, environment) end - def can_read_pod_logs? - Feature.enabled?(:monitor_logging, environment.project) && - can?(current_user, :read_pod_logs, environment.project) - end - def can_read_deploy_board? 
can?(current_user, :read_deploy_board, environment.project) end diff --git a/app/services/pod_logs/base_service.rb b/app/services/pod_logs/base_service.rb deleted file mode 100644 index e4b6ad31e33..00000000000 --- a/app/services/pod_logs/base_service.rb +++ /dev/null @@ -1,91 +0,0 @@ -# frozen_string_literal: true - -module PodLogs - class BaseService < ::BaseService - include ReactiveCaching - include Stepable - - attr_reader :cluster, :namespace, :params - - CACHE_KEY_GET_POD_LOG = 'get_pod_log' - K8S_NAME_MAX_LENGTH = 253 - - self.reactive_cache_work_type = :external_dependency - - def id - cluster.id - end - - def initialize(cluster, namespace, params: {}) - @cluster = cluster - @namespace = namespace - @params = filter_params(params.dup.stringify_keys).to_hash - end - - def execute - with_reactive_cache( - CACHE_KEY_GET_POD_LOG, - namespace, - params - ) do |result| - result - end - end - - def calculate_reactive_cache(request, _namespace, _params) - case request - when CACHE_KEY_GET_POD_LOG - execute_steps - else - exception = StandardError.new('Unknown reactive cache request') - Gitlab::ErrorTracking.track_and_raise_for_dev_exception(exception, request: request) - error(_('Unknown cache key')) - end - end - - private - - def valid_params - %w(pod_name container_name) - end - - def success_return_keys - %i(status logs pod_name container_name pods) - end - - def check_arguments(result) - return error(_('Cluster does not exist')) if cluster.nil? - return error(_('Namespace is empty')) if namespace.blank? 
- - result[:pod_name] = params['pod_name'].presence - result[:container_name] = params['container_name'].presence - - return error(_('Invalid pod_name')) if result[:pod_name] && !result[:pod_name].is_a?(String) - return error(_('Invalid container_name')) if result[:container_name] && !result[:container_name].is_a?(String) - - success(result) - end - - def get_raw_pods(result) - raise NotImplementedError - end - - def get_pod_names(result) - result[:pods] = result[:raw_pods].map { |p| p[:name] } - - success(result) - end - - def pod_logs(result) - raise NotImplementedError - end - - def filter_return_keys(result) - result.slice(*success_return_keys) - end - - def filter_params(params) - params.slice(*valid_params) - end - end -end diff --git a/app/services/pod_logs/elasticsearch_service.rb b/app/services/pod_logs/elasticsearch_service.rb deleted file mode 100644 index 28ccace62e5..00000000000 --- a/app/services/pod_logs/elasticsearch_service.rb +++ /dev/null @@ -1,98 +0,0 @@ -# frozen_string_literal: true - -module PodLogs - class ElasticsearchService < PodLogs::BaseService - steps :check_arguments, - :get_raw_pods, - :get_pod_names, - :check_times, - :check_search, - :check_cursor, - :pod_logs, - :filter_return_keys - - self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) } - - private - - def valid_params - super + %w(search start_time end_time cursor) - end - - def success_return_keys - super + %i(cursor) - end - - def get_raw_pods(result) - client = cluster&.elasticsearch_client - return error(_('Unable to connect to Elasticsearch')) unless client - - result[:raw_pods] = ::Gitlab::Elasticsearch::Logs::Pods.new(client).pods(namespace) - - success(result) - rescue Elasticsearch::Transport::Transport::ServerError => e - ::Gitlab::ErrorTracking.track_exception(e) - - error(_('Elasticsearch returned status code: %{status_code}') % { - # ServerError is the parent class of exceptions 
named after HTTP status codes, eg: "Elasticsearch::Transport::Transport::Errors::NotFound" - # there is no method on the exception other than the class name to determine the type of error encountered. - status_code: e.class.name.split('::').last - }) - end - - def check_times(result) - result[:start_time] = params['start_time'] if params.key?('start_time') && Time.iso8601(params['start_time']) - result[:end_time] = params['end_time'] if params.key?('end_time') && Time.iso8601(params['end_time']) - - success(result) - rescue ArgumentError - error(_('Invalid start or end time format')) - end - - def check_search(result) - result[:search] = params['search'] if params.key?('search') - - return error(_('Invalid search parameter')) if result[:search] && !result[:search].is_a?(String) - - success(result) - end - - def check_cursor(result) - result[:cursor] = params['cursor'] if params.key?('cursor') - - return error(_('Invalid cursor parameter')) if result[:cursor] && !result[:cursor].is_a?(String) - - success(result) - end - - def pod_logs(result) - client = cluster&.elasticsearch_client - return error(_('Unable to connect to Elasticsearch')) unless client - - response = ::Gitlab::Elasticsearch::Logs::Lines.new(client).pod_logs( - namespace, - pod_name: result[:pod_name], - container_name: result[:container_name], - search: result[:search], - start_time: result[:start_time], - end_time: result[:end_time], - cursor: result[:cursor], - chart_above_v2: cluster.elastic_stack_adapter.chart_above_v2? - ) - - result.merge!(response) - - success(result) - rescue Elasticsearch::Transport::Transport::ServerError => e - ::Gitlab::ErrorTracking.track_exception(e) - - error(_('Elasticsearch returned status code: %{status_code}') % { - # ServerError is the parent class of exceptions named after HTTP status codes, eg: "Elasticsearch::Transport::Transport::Errors::NotFound" - # there is no method on the exception other than the class name to determine the type of error encountered. 
- status_code: e.class.name.split('::').last - }) - rescue ::Gitlab::Elasticsearch::Logs::Lines::InvalidCursor - error(_('Invalid cursor value provided')) - end - end -end diff --git a/app/services/pod_logs/kubernetes_service.rb b/app/services/pod_logs/kubernetes_service.rb deleted file mode 100644 index 28b1a179635..00000000000 --- a/app/services/pod_logs/kubernetes_service.rb +++ /dev/null @@ -1,151 +0,0 @@ -# frozen_string_literal: true - -module PodLogs - class KubernetesService < PodLogs::BaseService - LOGS_LIMIT = 500 - REPLACEMENT_CHAR = "\u{FFFD}" - - EncodingHelperError = Class.new(StandardError) - - steps :check_arguments, - :get_raw_pods, - :get_pod_names, - :check_pod_name, - :check_container_name, - :pod_logs, - :encode_logs_to_utf8, - :split_logs, - :filter_return_keys - - self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) } - - private - - def get_raw_pods(result) - result[:raw_pods] = cluster.kubeclient.get_pods(namespace: namespace).map do |pod| - { - name: pod.metadata.name, - container_names: pod.spec.containers.map(&:name) - } - end - - success(result) - end - - def check_pod_name(result) - # If pod_name is not received as parameter, get the pod logs of the first - # pod of this namespace. 
- result[:pod_name] ||= result[:pods].first - - unless result[:pod_name] - return error(_('No pods available')) - end - - unless result[:pod_name].length.to_i <= K8S_NAME_MAX_LENGTH - return error(_('pod_name cannot be larger than %{max_length}'\ - ' chars' % { max_length: K8S_NAME_MAX_LENGTH })) - end - - unless result[:pod_name] =~ Gitlab::Regex.kubernetes_dns_subdomain_regex - return error(_('pod_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character')) - end - - unless result[:pods].include?(result[:pod_name]) - return error(_('Pod does not exist')) - end - - success(result) - end - - def check_container_name(result) - pod_details = result[:raw_pods].find { |p| p[:name] == result[:pod_name] } - container_names = pod_details[:container_names] - - # select first container if not specified - result[:container_name] ||= container_names.first - - unless result[:container_name] - return error(_('No containers available')) - end - - unless result[:container_name].length.to_i <= K8S_NAME_MAX_LENGTH - return error(_('container_name cannot be larger than'\ - ' %{max_length} chars' % { max_length: K8S_NAME_MAX_LENGTH })) - end - - unless result[:container_name] =~ Gitlab::Regex.kubernetes_dns_subdomain_regex - return error(_('container_name can contain only lowercase letters, digits, \'-\', and \'.\' and must start and end with an alphanumeric character')) - end - - unless container_names.include?(result[:container_name]) - return error(_('Container does not exist')) - end - - success(result) - end - - def pod_logs(result) - result[:logs] = cluster.kubeclient.get_pod_log( - result[:pod_name], - namespace, - container: result[:container_name], - tail_lines: LOGS_LIMIT, - timestamps: true - ).body - - success(result) - rescue Kubeclient::ResourceNotFoundError - error(_('Pod not found')) - rescue Kubeclient::HttpError => e - ::Gitlab::ErrorTracking.track_exception(e) - - error(_('Kubernetes API returned status 
code: %{error_code}') % { - error_code: e.error_code - }) - end - - # Check https://gitlab.com/gitlab-org/gitlab/issues/34965#note_292261879 - # for more details on why this is necessary. - def encode_logs_to_utf8(result) - return success(result) if result[:logs].nil? - return success(result) if result[:logs].encoding == Encoding::UTF_8 - - result[:logs] = encode_utf8(result[:logs]) - - success(result) - rescue EncodingHelperError - error(_('Unable to convert Kubernetes logs encoding to UTF-8')) - end - - def split_logs(result) - result[:logs] = result[:logs].strip.lines(chomp: true).map do |line| - # message contains a RFC3339Nano timestamp, then a space, then the log line. - # resolution of the nanoseconds can vary, so we split on the first space - values = line.split(' ', 2) - { - timestamp: values[0], - message: values[1], - pod: result[:pod_name] - } - end - - success(result) - end - - def encode_utf8(logs) - utf8_logs = Gitlab::EncodingHelper.encode_utf8(logs.dup, replace: REPLACEMENT_CHAR) - - # Gitlab::EncodingHelper.encode_utf8 can return '' or nil if an exception - # is raised while encoding. We prefer to return an error rather than wrongly - # display blank logs. - no_utf8_logs = logs.present? && utf8_logs.blank? 
- unexpected_encoding = utf8_logs&.encoding != Encoding::UTF_8 - - if no_utf8_logs || unexpected_encoding - raise EncodingHelperError, 'Could not convert Kubernetes logs to UTF-8' - end - - utf8_logs - end - end -end diff --git a/app/views/projects/issues/show.html.haml b/app/views/projects/issues/show.html.haml index 3572d1d6556..06c422fc4d6 100644 --- a/app/views/projects/issues/show.html.haml +++ b/app/views/projects/issues/show.html.haml @@ -3,6 +3,7 @@ - breadcrumb_title @issue.to_reference - page_title "#{@issue.title} (#{@issue.to_reference})", _("Issues") - add_page_specific_style 'page_bundles/issues_show' +- add_page_specific_style 'page_bundles/work_items' = render 'projects/issuable/show', issuable: @issue, api_awards_path: award_emoji_issue_api_path(@issue) = render 'projects/invite_members_modal', project: @project diff --git a/app/views/projects/work_items/index.html.haml b/app/views/projects/work_items/index.html.haml index 1f36afc48aa..d8b6ae96826 100644 --- a/app/views/projects/work_items/index.html.haml +++ b/app/views/projects/work_items/index.html.haml @@ -1,3 +1,4 @@ - page_title s_('WorkItem|Work Items') +- add_page_specific_style 'page_bundles/work_items' #js-work-items{ data: work_items_index_data(@project) } diff --git a/config/application.rb b/config/application.rb index 5e18d5fdd96..6b4a55d6d05 100644 --- a/config/application.rb +++ b/config/application.rb @@ -304,6 +304,7 @@ module Gitlab config.assets.precompile << "page_bundles/terms.css" config.assets.precompile << "page_bundles/todos.css" config.assets.precompile << "page_bundles/wiki.css" + config.assets.precompile << "page_bundles/work_items.css" config.assets.precompile << "page_bundles/xterm.css" config.assets.precompile << "lazy_bundles/cropper.css" config.assets.precompile << "lazy_bundles/select2.css" diff --git a/db/docs/project_relation_export_uploads.yml b/db/docs/project_relation_export_uploads.yml new file mode 100644 index 00000000000..369f6d281ee --- /dev/null +++ 
b/db/docs/project_relation_export_uploads.yml @@ -0,0 +1,9 @@ +--- +table_name: project_relation_export_uploads +classes: +- Projects::ImportExport::RelationExportUpload +feature_categories: +- importers +description: Used to store relation export files location +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90624 +milestone: '15.2' diff --git a/db/docs/project_relation_exports.yml b/db/docs/project_relation_exports.yml new file mode 100644 index 00000000000..7014d4cae0d --- /dev/null +++ b/db/docs/project_relation_exports.yml @@ -0,0 +1,9 @@ +--- +table_name: project_relation_exports +classes: +- Projects::ImportExport::RelationExport +feature_categories: +- importers +description: Used to track the generation of relation export files for projects +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90624 +milestone: '15.2' diff --git a/db/migrate/20220619182308_create_project_relation_exports.rb b/db/migrate/20220619182308_create_project_relation_exports.rb new file mode 100644 index 00000000000..7b92ca5110f --- /dev/null +++ b/db/migrate/20220619182308_create_project_relation_exports.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +class CreateProjectRelationExports < Gitlab::Database::Migration[2.0] + enable_lock_retries! 
+ + UNIQUE_INDEX_NAME = 'index_project_export_job_relation' + + def change + create_table :project_relation_exports do |t| + t.references :project_export_job, null: false, foreign_key: { on_delete: :cascade } + t.timestamps_with_timezone null: false + t.integer :status, limit: 2, null: false, default: 0 + t.text :relation, null: false, limit: 255 + t.text :jid, limit: 255 + t.text :export_error, limit: 300 + + t.index [:project_export_job_id, :relation], unique: true, name: UNIQUE_INDEX_NAME + end + end +end diff --git a/db/migrate/20220619184931_create_project_relation_export_uploads.rb b/db/migrate/20220619184931_create_project_relation_export_uploads.rb new file mode 100644 index 00000000000..03abf980f13 --- /dev/null +++ b/db/migrate/20220619184931_create_project_relation_export_uploads.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +class CreateProjectRelationExportUploads < Gitlab::Database::Migration[2.0] + enable_lock_retries! + + INDEX = 'index_project_relation_export_upload_id' + + def change + create_table :project_relation_export_uploads do |t| + t.references :project_relation_export, null: false, foreign_key: { on_delete: :cascade }, index: { name: INDEX } + t.timestamps_with_timezone null: false + t.text :export_file, null: false, limit: 255 + end + end +end diff --git a/db/schema_migrations/20220619182308 b/db/schema_migrations/20220619182308 new file mode 100644 index 00000000000..7d85fb1c487 --- /dev/null +++ b/db/schema_migrations/20220619182308 @@ -0,0 +1 @@ +f8830ecd0c49aea19857fec9b07d238f4bc269a758b6a3495d57222ab1604c74 \ No newline at end of file diff --git a/db/schema_migrations/20220619184931 b/db/schema_migrations/20220619184931 new file mode 100644 index 00000000000..a98c1f3e847 --- /dev/null +++ b/db/schema_migrations/20220619184931 @@ -0,0 +1 @@ +2cdbc5b29e11a2ce0679f218adc57c95d483139ca0bcd1801ea97fbd4ba68ddf \ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index 066e594a39c..817e0795f2f 100644 --- 
a/db/structure.sql +++ b/db/structure.sql @@ -19502,6 +19502,47 @@ CREATE TABLE project_pages_metadata ( onboarding_complete boolean DEFAULT false NOT NULL ); +CREATE TABLE project_relation_export_uploads ( + id bigint NOT NULL, + project_relation_export_id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone NOT NULL, + export_file text NOT NULL, + CONSTRAINT check_d8ee243e9e CHECK ((char_length(export_file) <= 255)) +); + +CREATE SEQUENCE project_relation_export_uploads_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE project_relation_export_uploads_id_seq OWNED BY project_relation_export_uploads.id; + +CREATE TABLE project_relation_exports ( + id bigint NOT NULL, + project_export_job_id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone NOT NULL, + status smallint DEFAULT 0 NOT NULL, + relation text NOT NULL, + jid text, + export_error text, + CONSTRAINT check_15e644d856 CHECK ((char_length(jid) <= 255)), + CONSTRAINT check_4b5880b795 CHECK ((char_length(relation) <= 255)), + CONSTRAINT check_dbd1cf73d0 CHECK ((char_length(export_error) <= 300)) +); + +CREATE SEQUENCE project_relation_exports_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER SEQUENCE project_relation_exports_id_seq OWNED BY project_relation_exports.id; + CREATE TABLE project_repositories ( id bigint NOT NULL, shard_id integer NOT NULL, @@ -23317,6 +23358,10 @@ ALTER TABLE ONLY project_incident_management_settings ALTER COLUMN project_id SE ALTER TABLE ONLY project_mirror_data ALTER COLUMN id SET DEFAULT nextval('project_mirror_data_id_seq'::regclass); +ALTER TABLE ONLY project_relation_export_uploads ALTER COLUMN id SET DEFAULT nextval('project_relation_export_uploads_id_seq'::regclass); + +ALTER TABLE ONLY project_relation_exports ALTER COLUMN id SET DEFAULT nextval('project_relation_exports_id_seq'::regclass); 
+ ALTER TABLE ONLY project_repositories ALTER COLUMN id SET DEFAULT nextval('project_repositories_id_seq'::regclass); ALTER TABLE ONLY project_repository_states ALTER COLUMN id SET DEFAULT nextval('project_repository_states_id_seq'::regclass); @@ -25455,6 +25500,12 @@ ALTER TABLE ONLY project_mirror_data ALTER TABLE ONLY project_pages_metadata ADD CONSTRAINT project_pages_metadata_pkey PRIMARY KEY (project_id); +ALTER TABLE ONLY project_relation_export_uploads + ADD CONSTRAINT project_relation_export_uploads_pkey PRIMARY KEY (id); + +ALTER TABLE ONLY project_relation_exports + ADD CONSTRAINT project_relation_exports_pkey PRIMARY KEY (id); + ALTER TABLE ONLY project_repositories ADD CONSTRAINT project_repositories_pkey PRIMARY KEY (id); @@ -29082,6 +29133,8 @@ CREATE INDEX index_project_deploy_tokens_on_deploy_token_id ON project_deploy_to CREATE UNIQUE INDEX index_project_deploy_tokens_on_project_id_and_deploy_token_id ON project_deploy_tokens USING btree (project_id, deploy_token_id); +CREATE UNIQUE INDEX index_project_export_job_relation ON project_relation_exports USING btree (project_export_job_id, relation); + CREATE UNIQUE INDEX index_project_export_jobs_on_jid ON project_export_jobs USING btree (jid); CREATE INDEX index_project_export_jobs_on_project_id_and_jid ON project_export_jobs USING btree (project_id, jid); @@ -29120,6 +29173,10 @@ CREATE INDEX index_project_pages_metadata_on_pages_deployment_id ON project_page CREATE INDEX index_project_pages_metadata_on_project_id_and_deployed_is_true ON project_pages_metadata USING btree (project_id) WHERE (deployed = true); +CREATE INDEX index_project_relation_export_upload_id ON project_relation_export_uploads USING btree (project_relation_export_id); + +CREATE INDEX index_project_relation_exports_on_project_export_job_id ON project_relation_exports USING btree (project_export_job_id); + CREATE UNIQUE INDEX index_project_repositories_on_disk_path ON project_repositories USING btree (disk_path); CREATE UNIQUE 
INDEX index_project_repositories_on_project_id ON project_repositories USING btree (project_id); @@ -33016,6 +33073,9 @@ ALTER TABLE ONLY design_management_versions ALTER TABLE ONLY approval_merge_request_rules_approved_approvers ADD CONSTRAINT fk_rails_6577725edb FOREIGN KEY (approval_merge_request_rule_id) REFERENCES approval_merge_request_rules(id) ON DELETE CASCADE; +ALTER TABLE ONLY project_relation_export_uploads + ADD CONSTRAINT fk_rails_660ada90c9 FOREIGN KEY (project_relation_export_id) REFERENCES project_relation_exports(id) ON DELETE CASCADE; + ALTER TABLE ONLY operations_feature_flags_clients ADD CONSTRAINT fk_rails_6650ed902c FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; @@ -33826,6 +33886,9 @@ ALTER TABLE ONLY ci_daily_build_group_report_results ALTER TABLE ONLY packages_debian_group_architectures ADD CONSTRAINT fk_rails_ef667d1b03 FOREIGN KEY (distribution_id) REFERENCES packages_debian_group_distributions(id) ON DELETE CASCADE; +ALTER TABLE ONLY project_relation_exports + ADD CONSTRAINT fk_rails_ef89b354fc FOREIGN KEY (project_export_job_id) REFERENCES project_export_jobs(id) ON DELETE CASCADE; + ALTER TABLE ONLY label_priorities ADD CONSTRAINT fk_rails_ef916d14fa FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE; diff --git a/doc/development/event_store.md b/doc/development/event_store.md index fa7208ead04..ffde51216cf 100644 --- a/doc/development/event_store.md +++ b/doc/development/event_store.md @@ -293,6 +293,8 @@ in the `handle_event` method of the subscriber worker. ## Testing +### Testing the publisher + The publisher's responsibility is to ensure that the event is published correctly. To test that an event has been published correctly, we can use the RSpec matcher `:publish_event`: @@ -308,6 +310,25 @@ it 'publishes a ProjectDeleted event with project id and namespace id' do end ``` +It is also possible to compose matchers inside the `:publish_event` matcher. 
+This could be useful when we want to assert that an event is created with a certain kind of value, +but we do not know the value in advance. An example of this is when publishing an event +after creating a new record. + +```ruby +it 'publishes a ProjectCreatedEvent with project id and namespace id' do + # The project ID will only be generated when the `create_project` + # is called in the expect block. + expected_data = { project_id: kind_of(Numeric), namespace_id: group_id } + + expect { create_project(user, name: 'Project', path: 'project', namespace_id: group_id) } + .to publish_event(Projects::ProjectCreatedEvent) + .with(expected_data) +end +``` + +### Testing the subscriber + The subscriber must ensure that a published event can be consumed correctly. For this purpose we have added helpers and shared examples to standardize the way we test subscribers: diff --git a/doc/update/index.md b/doc/update/index.md index a0ad846d6ec..0244fd656bd 100644 --- a/doc/update/index.md +++ b/doc/update/index.md @@ -379,7 +379,7 @@ Find where your version sits in the upgrade path below, and upgrade GitLab accordingly, while also consulting the [version-specific upgrade instructions](#version-specific-upgrading-instructions): -`8.11.Z` -> `8.12.0` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> [`11.11.8`](#1200) -> `12.0.12` -> [`12.1.17`](#1210) -> [`12.10.14`](#12100) -> `13.0.14` -> [`13.1.11`](#1310) -> [`13.8.8`](#1388) -> [`13.12.15`](#13120) -> [`14.0.12`](#1400) -> [`14.3.6`](#1430) -> [`14.9.5`](#1490) -> [`14.10.Z`](#1410) -> [`15.0.Z`](#1500) -> [latest `15.Y.Z`](https://gitlab.com/gitlab-org/gitlab/-/releases) +`8.11.Z` -> `8.12.0` -> `8.17.7` -> `9.5.10` -> `10.8.7` -> [`11.11.8`](#1200) -> `12.0.12` -> [`12.1.17`](#1210) -> [`12.10.14`](#12100) -> `13.0.14` -> [`13.1.11`](#1310) -> [`13.8.8`](#1388) -> [`13.12.15`](#13120) -> [`14.0.12`](#1400) -> [`14.3.6`](#1430) -> [`14.9.5`](#1490) -> [`14.10.Z`](#14100) -> [`15.0.Z`](#1500) -> [latest 
`15.Y.Z`](https://gitlab.com/gitlab-org/gitlab/-/releases) The following table, while not exhaustive, shows some examples of the supported upgrade paths. diff --git a/doc/user/project/integrations/webhook_events.md b/doc/user/project/integrations/webhook_events.md index d05e9533d7d..e6bf766e177 100644 --- a/doc/user/project/integrations/webhook_events.md +++ b/doc/user/project/integrations/webhook_events.md @@ -824,6 +824,11 @@ The available values for `object_attributes.action` in the payload are: - `unapproval` - `merge` +The field `object_attributes.oldrev` is only available when there are actual code changes, like: + +- New code is pushed. +- A [suggestion](../merge_requests/reviews/suggestions.md) is applied. + Request header: ```plaintext diff --git a/lib/gitlab/database/gitlab_schemas.yml b/lib/gitlab/database/gitlab_schemas.yml index 6a951f266ca..17078dde9d8 100644 --- a/lib/gitlab/database/gitlab_schemas.yml +++ b/lib/gitlab/database/gitlab_schemas.yml @@ -425,6 +425,8 @@ project_incident_management_settings: :gitlab_main project_metrics_settings: :gitlab_main project_mirror_data: :gitlab_main project_pages_metadata: :gitlab_main +project_relation_export_uploads: :gitlab_main +project_relation_exports: :gitlab_main project_repositories: :gitlab_main project_repository_states: :gitlab_main project_repository_storage_moves: :gitlab_main diff --git a/lib/gitlab/diff/file.rb b/lib/gitlab/diff/file.rb index 8e039d32ef5..8c55652da43 100644 --- a/lib/gitlab/diff/file.rb +++ b/lib/gitlab/diff/file.rb @@ -373,7 +373,7 @@ module Gitlab end def rendered - return unless use_semantic_ipynb_diff? && ipynb? && modified_file? && !too_large? + return unless use_semantic_ipynb_diff? && ipynb? && modified_file? && !collapsed? && !too_large? 
strong_memoize(:rendered) { Rendered::Notebook::DiffFile.new(self) } end diff --git a/lib/gitlab/elasticsearch/logs/lines.rb b/lib/gitlab/elasticsearch/logs/lines.rb deleted file mode 100644 index ff9185dd331..00000000000 --- a/lib/gitlab/elasticsearch/logs/lines.rb +++ /dev/null @@ -1,157 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Elasticsearch - module Logs - class Lines - InvalidCursor = Class.new(RuntimeError) - - # How many log lines to fetch in a query - LOGS_LIMIT = 500 - - def initialize(client) - @client = client - end - - def pod_logs(namespace, pod_name: nil, container_name: nil, search: nil, start_time: nil, end_time: nil, cursor: nil, chart_above_v2: true) - query = { bool: { must: [] } }.tap do |q| - filter_pod_name(q, pod_name) - filter_namespace(q, namespace) - filter_container_name(q, container_name) - filter_search(q, search) - filter_times(q, start_time, end_time) - end - - body = build_body(query, cursor, chart_above_v2) - response = @client.search body: body - - format_response(response) - end - - private - - def build_body(query, cursor = nil, chart_above_v2 = true) - offset_field = chart_above_v2 ? "log.offset" : "offset" - body = { - query: query, - # reverse order so we can query N-most recent records - sort: [ - { "@timestamp": { order: :desc } }, - { "#{offset_field}": { order: :desc } } - ], - # only return these fields in the response - _source: ["@timestamp", "message", "kubernetes.pod.name"], - # fixed limit for now, we should support paginated queries - size: ::Gitlab::Elasticsearch::Logs::Lines::LOGS_LIMIT - } - - unless cursor.nil? - body[:search_after] = decode_cursor(cursor) - end - - body - end - - def filter_pod_name(query, pod_name) - # We can filter by "all pods" with a null pod_name - return if pod_name.nil? 
- - query[:bool][:must] << { - match_phrase: { - "kubernetes.pod.name" => { - query: pod_name - } - } - } - end - - def filter_namespace(query, namespace) - query[:bool][:must] << { - match_phrase: { - "kubernetes.namespace" => { - query: namespace - } - } - } - end - - def filter_container_name(query, container_name) - # A pod can contain multiple containers. - # By default we return logs from every container - return if container_name.nil? - - query[:bool][:must] << { - match_phrase: { - "kubernetes.container.name" => { - query: container_name - } - } - } - end - - def filter_search(query, search) - return if search.nil? - - query[:bool][:must] << { - simple_query_string: { - query: search, - fields: [:message], - default_operator: :and - } - } - end - - def filter_times(query, start_time, end_time) - return unless start_time || end_time - - time_range = { range: { :@timestamp => {} } }.tap do |tr| - tr[:range][:@timestamp][:gte] = start_time if start_time - tr[:range][:@timestamp][:lt] = end_time if end_time - end - - query[:bool][:filter] = [time_range] - end - - def format_response(response) - results = response.fetch("hits", {}).fetch("hits", []) - last_result = results.last - results = results.map do |hit| - { - timestamp: hit["_source"]["@timestamp"], - message: hit["_source"]["message"], - pod: hit["_source"]["kubernetes"]["pod"]["name"] - } - end - - # we queried for the N-most recent records but we want them ordered oldest to newest - { - logs: results.reverse, - cursor: last_result.nil? ? 
nil : encode_cursor(last_result["sort"]) - } - end - - # we want to hide the implementation details of the search_after parameter from the frontend - # behind a single easily transmitted value - def encode_cursor(obj) - obj.join(',') - end - - def decode_cursor(obj) - cursor = obj.split(',').map(&:to_i) - - unless valid_cursor(cursor) - raise InvalidCursor, "invalid cursor format" - end - - cursor - end - - def valid_cursor(cursor) - cursor.instance_of?(Array) && - cursor.length == 2 && - cursor.map {|i| i.instance_of?(Integer)}.reduce(:&) - end - end - end - end -end diff --git a/lib/gitlab/elasticsearch/logs/pods.rb b/lib/gitlab/elasticsearch/logs/pods.rb deleted file mode 100644 index 66499ae956a..00000000000 --- a/lib/gitlab/elasticsearch/logs/pods.rb +++ /dev/null @@ -1,70 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Elasticsearch - module Logs - class Pods - # How many items to fetch in a query - PODS_LIMIT = 500 - CONTAINERS_LIMIT = 500 - - def initialize(client) - @client = client - end - - def pods(namespace) - body = build_body(namespace) - response = @client.search body: body - - format_response(response) - end - - private - - def build_body(namespace) - { - aggs: { - pods: { - aggs: { - containers: { - terms: { - field: 'kubernetes.container.name', - size: ::Gitlab::Elasticsearch::Logs::Pods::CONTAINERS_LIMIT - } - } - }, - terms: { - field: 'kubernetes.pod.name', - size: ::Gitlab::Elasticsearch::Logs::Pods::PODS_LIMIT - } - } - }, - query: { - bool: { - must: { - match_phrase: { - "kubernetes.namespace": namespace - } - } - } - }, - # don't populate hits, only the aggregation is needed - size: 0 - } - end - - def format_response(response) - results = response.dig("aggregations", "pods", "buckets") || [] - results.map do |bucket| - { - name: bucket["key"], - container_names: (bucket.dig("containers", "buckets") || []).map do |cbucket| - cbucket["key"] - end - } - end - end - end - end - end -end diff --git a/locale/gitlab.pot 
b/locale/gitlab.pot index 7672b6b7722..258220d1366 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -708,6 +708,9 @@ msgstr "" msgid "%{labelStart}Crash Address:%{labelEnd} %{crash_address}" msgstr "" +msgid "%{labelStart}Crash State:%{labelEnd} %{crash_state}" +msgstr "" + msgid "%{labelStart}Crash State:%{labelEnd} %{stacktrace_snippet}" msgstr "" @@ -8252,6 +8255,123 @@ msgstr "" msgid "Cloud Storage" msgstr "" +msgid "CloudSeed|All" +msgstr "" + +msgid "CloudSeed|AlloyDB for Postgres" +msgstr "" + +msgid "CloudSeed|Available database services through which instances may be created" +msgstr "" + +msgid "CloudSeed|Cancel" +msgstr "" + +msgid "CloudSeed|Cloud Firestore" +msgstr "" + +msgid "CloudSeed|Cloud SQL for MySQL" +msgstr "" + +msgid "CloudSeed|Cloud SQL for Postgres" +msgstr "" + +msgid "CloudSeed|Cloud SQL for SQL Server" +msgstr "" + +msgid "CloudSeed|CloudSQL Instance" +msgstr "" + +msgid "CloudSeed|Create cluster" +msgstr "" + +msgid "CloudSeed|Create database" +msgstr "" + +msgid "CloudSeed|Create instance" +msgstr "" + +msgid "CloudSeed|Database instance is generated within the selected Google Cloud project" +msgstr "" + +msgid "CloudSeed|Database instances associated with this project" +msgstr "" + +msgid "CloudSeed|Database version" +msgstr "" + +msgid "CloudSeed|Description" +msgstr "" + +msgid "CloudSeed|Determines memory and virtual cores available to your instance" +msgstr "" + +msgid "CloudSeed|Enhance security by storing database variables in secret managers - learn more about %{docLinkStart}secret management with GitLab%{docLinkEnd}" +msgstr "" + +msgid "CloudSeed|Environment" +msgstr "" + +msgid "CloudSeed|Flexible, scalable NoSQL cloud database for client- and server-side development" +msgstr "" + +msgid "CloudSeed|Fully managed PostgreSQL-compatible service for high-demand workloads" +msgstr "" + +msgid "CloudSeed|Fully managed relational database service for MySQL" +msgstr "" + +msgid "CloudSeed|Fully managed relational database 
service for PostgreSQL" +msgstr "" + +msgid "CloudSeed|Fully managed relational database service for SQL Server" +msgstr "" + +msgid "CloudSeed|Generated database instance is linked to the selected branch or tag" +msgstr "" + +msgid "CloudSeed|Google Cloud Project" +msgstr "" + +msgid "CloudSeed|Google Cloud project" +msgstr "" + +msgid "CloudSeed|I accept Google Cloud pricing and responsibilities involved with managing database instances" +msgstr "" + +msgid "CloudSeed|Instances" +msgstr "" + +msgid "CloudSeed|Learn more about pricing for %{cloudsqlPricingStart}Cloud SQL%{cloudsqlPricingEnd}, %{alloydbPricingStart}Alloy DB%{alloydbPricingEnd}, %{memorystorePricingStart}Memorystore%{memorystorePricingEnd} and %{firestorePricingStart}Firestore%{firestorePricingEnd}." +msgstr "" + +msgid "CloudSeed|Machine type" +msgstr "" + +msgid "CloudSeed|Memorystore for Redis" +msgstr "" + +msgid "CloudSeed|No instances" +msgstr "" + +msgid "CloudSeed|Refs" +msgstr "" + +msgid "CloudSeed|Scalable, secure, and highly available in-memory service for Redis" +msgstr "" + +msgid "CloudSeed|Service" +msgstr "" + +msgid "CloudSeed|Services" +msgstr "" + +msgid "CloudSeed|There are no instances to display." +msgstr "" + +msgid "CloudSeed|Version" +msgstr "" + msgid "Cluster" msgstr "" @@ -8261,9 +8381,6 @@ msgstr "" msgid "Cluster cache cleared." msgstr "" -msgid "Cluster does not exist" -msgstr "" - msgid "Cluster is required for Stages::ClusterEndpointInserter" msgstr "" @@ -9699,9 +9816,6 @@ msgstr "" msgid "Container Scanning" msgstr "" -msgid "Container does not exist" -msgstr "" - msgid "Container must be a project or a group." 
msgstr "" @@ -13915,9 +14029,6 @@ msgstr "" msgid "Elasticsearch reindexing was not started: %{errors}" msgstr "" -msgid "Elasticsearch returned status code: %{status_code}" -msgstr "" - msgid "Elasticsearch zero-downtime reindexing" msgstr "" @@ -14374,9 +14485,6 @@ msgstr "" msgid "Environment" msgstr "" -msgid "Environment does not have deployments" -msgstr "" - msgid "Environment is required for Stages::MetricEndpointInserter" msgstr "" @@ -16889,9 +16997,6 @@ msgstr "" msgid "Geo|Primary" msgstr "" -msgid "Geo|Primary node" -msgstr "" - msgid "Geo|Primary site" msgstr "" @@ -16991,9 +17096,6 @@ msgstr "" msgid "Geo|Secondary" msgstr "" -msgid "Geo|Secondary node" -msgstr "" - msgid "Geo|Secondary site" msgstr "" @@ -20928,15 +21030,6 @@ msgstr "" msgid "Invalid URL: %{url}" msgstr "" -msgid "Invalid container_name" -msgstr "" - -msgid "Invalid cursor parameter" -msgstr "" - -msgid "Invalid cursor value provided" -msgstr "" - msgid "Invalid date" msgstr "" @@ -20979,9 +21072,6 @@ msgstr "" msgid "Invalid pin code." msgstr "" -msgid "Invalid pod_name" -msgstr "" - msgid "Invalid policy type" msgstr "" @@ -20994,15 +21084,9 @@ msgstr "" msgid "Invalid rule" msgstr "" -msgid "Invalid search parameter" -msgstr "" - msgid "Invalid server response" msgstr "" -msgid "Invalid start or end time format" -msgstr "" - msgid "Invalid status" msgstr "" @@ -22365,9 +22449,6 @@ msgstr "" msgid "Kubernetes" msgstr "" -msgid "Kubernetes API returned status code: %{error_code}" -msgstr "" - msgid "Kubernetes Cluster" msgstr "" @@ -25251,9 +25332,6 @@ msgstr "" msgid "Namespace ID:" msgstr "" -msgid "Namespace is empty" -msgstr "" - msgid "Namespace:" msgstr "" @@ -25726,9 +25804,6 @@ msgstr "" msgid "No connection could be made to a Gitaly Server, please check your logs!" 
msgstr "" -msgid "No containers available" -msgstr "" - msgid "No contributions" msgstr "" @@ -25864,9 +25939,6 @@ msgstr "" msgid "No plan" msgstr "" -msgid "No pods available" -msgstr "" - msgid "No policy matches this license" msgstr "" @@ -28875,12 +28947,6 @@ msgstr "" msgid "Please wait while we import the repository for you. Refresh at will." msgstr "" -msgid "Pod does not exist" -msgstr "" - -msgid "Pod not found" -msgstr "" - msgid "Pods in use" msgstr "" @@ -40769,9 +40835,6 @@ msgstr "" msgid "Unable to collect memory info" msgstr "" -msgid "Unable to connect to Elasticsearch" -msgstr "" - msgid "Unable to connect to Prometheus server" msgstr "" @@ -40781,9 +40844,6 @@ msgstr "" msgid "Unable to connect to the Jira instance. Please check your Jira integration configuration." msgstr "" -msgid "Unable to convert Kubernetes logs encoding to UTF-8" -msgstr "" - msgid "Unable to create link to vulnerability" msgstr "" @@ -40922,9 +40982,6 @@ msgstr "" msgid "Unknown Error" msgstr "" -msgid "Unknown cache key" -msgstr "" - msgid "Unknown encryption strategy: %{encrypted_strategy}!" msgstr "" @@ -45280,12 +45337,6 @@ msgstr "" msgid "container registry images" msgstr "" -msgid "container_name can contain only lowercase letters, digits, '-', and '.' and must start and end with an alphanumeric character" -msgstr "" - -msgid "container_name cannot be larger than %{max_length} chars" -msgstr "" - msgid "contains URLs that exceed the 1024 character limit (%{urls})" msgstr "" @@ -46276,12 +46327,6 @@ msgstr "" msgid "pipelineEditorWalkthrough|You can use the file tree to view your pipeline configuration files. %{linkStart}Learn more%{linkEnd}" msgstr "" -msgid "pod_name can contain only lowercase letters, digits, '-', and '.' 
and must start and end with an alphanumeric character" -msgstr "" - -msgid "pod_name cannot be larger than %{max_length} chars" -msgstr "" - msgid "point" msgid_plural "points" msgstr[0] "" diff --git a/spec/factories/projects/import_export/export_relation.rb b/spec/factories/projects/import_export/export_relation.rb new file mode 100644 index 00000000000..2b6419dcecb --- /dev/null +++ b/spec/factories/projects/import_export/export_relation.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +FactoryBot.define do + factory :project_relation_export, class: 'Projects::ImportExport::RelationExport' do + project_export_job factory: :project_export_job + + relation { 'labels' } + status { 0 } + sequence(:jid) { |n| "project_relation_export_#{n}" } + end +end diff --git a/spec/fixtures/gitlab/import_export/labels.tar.gz b/spec/fixtures/gitlab/import_export/labels.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..8329dcf3b4a1e57fe697893180dee6a14a1185e0 GIT binary patch literal 768 zcmV+b1ONOViwFQrwy$CU1MOAIa?>yrRm*G00#>m0m~E3vwq(^QT{tgNCP|ytDYOjJ zQ5>Z>II(LhG{CUqJJ|9AY}oK6Y+12q#RBC;OQCHhq=Zg$W;8dFuXNAJ^^t?nZ*WH} zf^!6fkP@QwqS22~!~Hmbg0Mz(s_Rh43Pc1U1Qc?fq=slmf`yVw=+{Db&li5vZ?Vw* zEl;M^>!+ME(D5@~5Cnt%Cw4tn&&T2edocuj-mj5Zl$NY~7CP3*-r1}}SPNk`k(24L=pMb|A-PwXW##Sf!C^Vg~!#TTH=07 zT~Hn0^FukkM;YM^%9X(@vg_8=q$U$W&IGURG9j}xsA+LX;t<=Kp47FZW+bQycjUHq zP#Y;v@Mhr2Of^|&Uc?vF8jqaN4TLNCBwKCITTu5xza8`l^ylJb(51ZIXIALGyFH?= zAG*Tjk(zw+w0m+#q=#~HJ*aPDhl*|14%^XD%~jJeF&|5_KB8uQx|$#OUj3+I!-S4r y9ZNACQ8Asa;xv;x9DY { }); it.each` - status | iconName | lineNumber - ${'Never connected'} | ${'status-neutral'} | ${0} - ${'Connected'} | ${'status-success'} | ${1} - ${'Not connected'} | ${'severity-critical'} | ${2} + status | iconName | lineNumber + ${'Never connected'} | ${'status-neutral'} | ${0} + ${'Connected'} | ${'status-success'} | ${1} + ${'Not connected'} | ${'status-alert'} | ${2} `( 'displays agent connection status as "$status" at line $lineNumber', ({ status, 
iconName, lineNumber }) => { diff --git a/spec/frontend/google_cloud/components/cloudsql/create_instance_form_spec.js b/spec/frontend/google_cloud/components/cloudsql/create_instance_form_spec.js new file mode 100644 index 00000000000..de644a33b50 --- /dev/null +++ b/spec/frontend/google_cloud/components/cloudsql/create_instance_form_spec.js @@ -0,0 +1,103 @@ +import { GlFormCheckbox } from '@gitlab/ui'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import InstanceForm from '~/google_cloud/components/cloudsql/create_instance_form.vue'; + +describe('google_cloud::cloudsql::create_instance_form component', () => { + let wrapper; + + const findByTestId = (id) => wrapper.findByTestId(id); + const findCancelButton = () => findByTestId('cancel-button'); + const findCheckbox = () => wrapper.findComponent(GlFormCheckbox); + const findHeader = () => wrapper.find('header'); + const findSubmitButton = () => findByTestId('submit-button'); + + const propsData = { + gcpProjects: [], + refs: [], + cancelPath: '#cancel-url', + formTitle: 'mock form title', + formDescription: 'mock form description', + databaseVersions: [], + tiers: [], + }; + + beforeEach(() => { + wrapper = shallowMountExtended(InstanceForm, { propsData, stubs: { GlFormCheckbox } }); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('contains header', () => { + expect(findHeader().exists()).toBe(true); + }); + + it('contains GCP project form group', () => { + const formGroup = findByTestId('form_group_gcp_project'); + expect(formGroup.exists()).toBe(true); + expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.gcpProjectLabel); + expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.gcpProjectDescription); + }); + + it('contains GCP project dropdown', () => { + const select = findByTestId('select_gcp_project'); + expect(select.exists()).toBe(true); + }); + + it('contains Environments form group', () => { + const formGroup = 
findByTestId('form_group_environments'); + expect(formGroup.exists()).toBe(true); + expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.refsLabel); + expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.refsDescription); + }); + + it('contains Environments dropdown', () => { + const select = findByTestId('select_environments'); + expect(select.exists()).toBe(true); + }); + + it('contains Tier form group', () => { + const formGroup = findByTestId('form_group_tier'); + expect(formGroup.exists()).toBe(true); + expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.tierLabel); + expect(formGroup.attributes('description')).toBe(InstanceForm.i18n.tierDescription); + }); + + it('contains Tier dropdown', () => { + const select = findByTestId('select_tier'); + expect(select.exists()).toBe(true); + }); + + it('contains Database Version form group', () => { + const formGroup = findByTestId('form_group_database_version'); + expect(formGroup.exists()).toBe(true); + expect(formGroup.attributes('label')).toBe(InstanceForm.i18n.databaseVersionLabel); + }); + + it('contains Database Version dropdown', () => { + const select = findByTestId('select_database_version'); + expect(select.exists()).toBe(true); + }); + + it('contains Submit button', () => { + expect(findSubmitButton().exists()).toBe(true); + expect(findSubmitButton().text()).toBe(InstanceForm.i18n.submitLabel); + }); + + it('contains Cancel button', () => { + expect(findCancelButton().exists()).toBe(true); + expect(findCancelButton().text()).toBe(InstanceForm.i18n.cancelLabel); + expect(findCancelButton().attributes('href')).toBe('#cancel-url'); + }); + + it('contains Confirmation checkbox', () => { + const checkbox = findCheckbox(); + expect(checkbox.text()).toBe(InstanceForm.i18n.checkboxLabel); + }); + + it('checkbox must be required', () => { + const checkbox = findCheckbox(); + expect(checkbox.attributes('required')).toBe('true'); + }); +}); diff --git 
a/spec/frontend/google_cloud/components/cloudsql/instance_table_spec.js b/spec/frontend/google_cloud/components/cloudsql/instance_table_spec.js new file mode 100644 index 00000000000..286f2b8e379 --- /dev/null +++ b/spec/frontend/google_cloud/components/cloudsql/instance_table_spec.js @@ -0,0 +1,65 @@ +import { shallowMount } from '@vue/test-utils'; +import { GlEmptyState, GlTable } from '@gitlab/ui'; +import InstanceTable from '~/google_cloud/components/cloudsql/instance_table.vue'; + +describe('google_cloud::databases::service_table component', () => { + let wrapper; + + const findEmptyState = () => wrapper.findComponent(GlEmptyState); + const findTable = () => wrapper.findComponent(GlTable); + + afterEach(() => { + wrapper.destroy(); + }); + + describe('when there are no instances', () => { + beforeEach(() => { + const propsData = { + cloudsqlInstances: [], + emptyIllustrationUrl: '#empty-illustration-url', + }; + wrapper = shallowMount(InstanceTable, { propsData }); + }); + + it('should depict empty state', () => { + const emptyState = findEmptyState(); + expect(emptyState.exists()).toBe(true); + expect(emptyState.attributes('title')).toBe(InstanceTable.i18n.noInstancesTitle); + expect(emptyState.attributes('description')).toBe(InstanceTable.i18n.noInstancesDescription); + }); + }); + + describe('when there are three instances', () => { + beforeEach(() => { + const propsData = { + cloudsqlInstances: [ + { + ref: '*', + gcp_project: 'test-gcp-project', + instance_name: 'postgres-14-instance', + version: 'POSTGRES_14', + }, + { + ref: 'production', + gcp_project: 'prod-gcp-project', + instance_name: 'postgres-14-instance', + version: 'POSTGRES_14', + }, + { + ref: 'staging', + gcp_project: 'test-gcp-project', + instance_name: 'postgres-14-instance', + version: 'POSTGRES_14', + }, + ], + emptyIllustrationUrl: '#empty-illustration-url', + }; + wrapper = shallowMount(InstanceTable, { propsData }); + }); + + it('should contain a table', () => { + const table = 
findTable(); + expect(table.exists()).toBe(true); + }); + }); +}); diff --git a/spec/frontend/google_cloud/components/databases/service_table_spec.js b/spec/frontend/google_cloud/components/databases/service_table_spec.js new file mode 100644 index 00000000000..142e32c1a4b --- /dev/null +++ b/spec/frontend/google_cloud/components/databases/service_table_spec.js @@ -0,0 +1,44 @@ +import { GlTable } from '@gitlab/ui'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; +import ServiceTable from '~/google_cloud/components/databases/service_table.vue'; + +describe('google_cloud::databases::service_table component', () => { + let wrapper; + + const findTable = () => wrapper.findComponent(GlTable); + + beforeEach(() => { + const propsData = { + cloudsqlPostgresUrl: '#url-cloudsql-postgres', + cloudsqlMysqlUrl: '#url-cloudsql-mysql', + cloudsqlSqlserverUrl: '#url-cloudsql-sqlserver', + alloydbPostgresUrl: '#url-alloydb-postgres', + memorystoreRedisUrl: '#url-memorystore-redis', + firestoreUrl: '#url-firestore', + }; + wrapper = mountExtended(ServiceTable, { propsData }); + }); + + afterEach(() => { + wrapper.destroy(); + }); + + it('should contain a table', () => { + expect(findTable().exists()).toBe(true); + }); + + it.each` + name | testId | url + ${'cloudsql-postgres'} | ${'button-cloudsql-postgres'} | ${'#url-cloudsql-postgres'} + ${'cloudsql-mysql'} | ${'button-cloudsql-mysql'} | ${'#url-cloudsql-mysql'} + ${'cloudsql-sqlserver'} | ${'button-cloudsql-sqlserver'} | ${'#url-cloudsql-sqlserver'} + ${'alloydb-postgres'} | ${'button-alloydb-postgres'} | ${'#url-alloydb-postgres'} + ${'memorystore-redis'} | ${'button-memorystore-redis'} | ${'#url-memorystore-redis'} + ${'firestore'} | ${'button-firestore'} | ${'#url-firestore'} + `('renders $name button with correct url', ({ testId, url }) => { + const button = wrapper.findByTestId(testId); + + expect(button.exists()).toBe(true); + expect(button.attributes('href')).toBe(url); + }); +}); diff --git 
a/spec/frontend/security_configuration/components/app_spec.js b/spec/frontend/security_configuration/components/app_spec.js index 97b6a1e8b74..c3824ad9701 100644 --- a/spec/frontend/security_configuration/components/app_spec.js +++ b/spec/frontend/security_configuration/components/app_spec.js @@ -42,12 +42,36 @@ describe('App component', () => { let wrapper; let userCalloutDismissSpy; - const createComponent = ({ shouldShowCallout = true, ...propsData }) => { + const securityFeaturesMock = [ + { + name: SAST_NAME, + shortName: SAST_SHORT_NAME, + description: SAST_DESCRIPTION, + helpPath: SAST_HELP_PATH, + configurationHelpPath: SAST_CONFIG_HELP_PATH, + type: REPORT_TYPE_SAST, + available: true, + }, + ]; + + const complianceFeaturesMock = [ + { + name: LICENSE_COMPLIANCE_NAME, + description: LICENSE_COMPLIANCE_DESCRIPTION, + helpPath: LICENSE_COMPLIANCE_HELP_PATH, + type: REPORT_TYPE_LICENSE_COMPLIANCE, + configurationHelpPath: LICENSE_COMPLIANCE_HELP_PATH, + }, + ]; + + const createComponent = ({ shouldShowCallout = true, ...propsData } = {}) => { userCalloutDismissSpy = jest.fn(); wrapper = extendedWrapper( mount(SecurityConfigurationApp, { propsData: { + augmentedSecurityFeatures: securityFeaturesMock, + augmentedComplianceFeatures: complianceFeaturesMock, securityTrainingEnabled: true, ...propsData, }, @@ -108,38 +132,13 @@ describe('App component', () => { const findAutoDevopsEnabledAlert = () => wrapper.findComponent(AutoDevopsEnabledAlert); const findVulnerabilityManagementTab = () => wrapper.findByTestId('vulnerability-management-tab'); - const securityFeaturesMock = [ - { - name: SAST_NAME, - shortName: SAST_SHORT_NAME, - description: SAST_DESCRIPTION, - helpPath: SAST_HELP_PATH, - configurationHelpPath: SAST_CONFIG_HELP_PATH, - type: REPORT_TYPE_SAST, - available: true, - }, - ]; - - const complianceFeaturesMock = [ - { - name: LICENSE_COMPLIANCE_NAME, - description: LICENSE_COMPLIANCE_DESCRIPTION, - helpPath: LICENSE_COMPLIANCE_HELP_PATH, - type: 
REPORT_TYPE_LICENSE_COMPLIANCE, - configurationHelpPath: LICENSE_COMPLIANCE_HELP_PATH, - }, - ]; - afterEach(() => { wrapper.destroy(); }); describe('basic structure', () => { - beforeEach(async () => { - createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, - augmentedComplianceFeatures: complianceFeaturesMock, - }); + beforeEach(() => { + createComponent(); }); it('renders main-heading with correct text', () => { @@ -199,10 +198,7 @@ describe('App component', () => { describe('Manage via MR Error Alert', () => { beforeEach(() => { - createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, - augmentedComplianceFeatures: complianceFeaturesMock, - }); + createComponent(); }); describe('on initial load', () => { @@ -238,8 +234,6 @@ describe('App component', () => { describe('given the right props', () => { beforeEach(() => { createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, - augmentedComplianceFeatures: complianceFeaturesMock, autoDevopsEnabled: false, gitlabCiPresent: false, canEnableAutoDevops: true, @@ -261,10 +255,7 @@ describe('App component', () => { describe('given the wrong props', () => { beforeEach(() => { - createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, - augmentedComplianceFeatures: complianceFeaturesMock, - }); + createComponent(); }); it('should not show AutoDevopsAlert', () => { expect(findAutoDevopsAlert().exists()).toBe(false); @@ -289,8 +280,6 @@ describe('App component', () => { } createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, - augmentedComplianceFeatures: complianceFeaturesMock, autoDevopsEnabled, }); }); @@ -348,7 +337,6 @@ describe('App component', () => { describe('given at least one unavailable feature', () => { beforeEach(() => { createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, augmentedComplianceFeatures: complianceFeaturesMock.map(makeAvailable(false)), }); }); @@ -369,7 +357,6 @@ describe('App component', () => { describe('given 
at least one unavailable feature, but banner is already dismissed', () => { beforeEach(() => { createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, augmentedComplianceFeatures: complianceFeaturesMock.map(makeAvailable(false)), shouldShowCallout: false, }); @@ -397,8 +384,6 @@ describe('App component', () => { describe('when given latestPipelinePath props', () => { beforeEach(() => { createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, - augmentedComplianceFeatures: complianceFeaturesMock, latestPipelinePath: 'test/path', }); }); @@ -425,8 +410,6 @@ describe('App component', () => { describe('given gitlabCiPresent & gitlabCiHistoryPath props', () => { beforeEach(() => { createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, - augmentedComplianceFeatures: complianceFeaturesMock, gitlabCiPresent: true, gitlabCiHistoryPath, }); @@ -446,8 +429,6 @@ describe('App component', () => { beforeEach(async () => { createComponent({ - augmentedSecurityFeatures: securityFeaturesMock, - augmentedComplianceFeatures: complianceFeaturesMock, ...props, }); }); diff --git a/spec/frontend/work_items/components/item_title_spec.js b/spec/frontend/work_items/components/item_title_spec.js index 2c3f6ef8634..a55f448c9a2 100644 --- a/spec/frontend/work_items/components/item_title_spec.js +++ b/spec/frontend/work_items/components/item_title_spec.js @@ -1,5 +1,4 @@ import { shallowMount } from '@vue/test-utils'; -import { escape } from 'lodash'; import ItemTitle from '~/work_items/components/item_title.vue'; jest.mock('lodash/escape', () => jest.fn((fn) => fn)); @@ -51,6 +50,5 @@ describe('ItemTitle', () => { await findInputEl().trigger(sourceEvent); expect(wrapper.emitted(eventName)).toBeTruthy(); - expect(escape).toHaveBeenCalledWith(mockUpdatedTitle); }); }); diff --git a/spec/frontend/work_items/components/work_item_weight_spec.js b/spec/frontend/work_items/components/work_item_weight_spec.js index 80a1d032ad7..c3bbea26cda 100644 --- 
a/spec/frontend/work_items/components/work_item_weight_spec.js +++ b/spec/frontend/work_items/components/work_item_weight_spec.js @@ -1,21 +1,51 @@ -import { shallowMount } from '@vue/test-utils'; +import { GlForm, GlFormInput } from '@gitlab/ui'; +import { nextTick } from 'vue'; +import { mockTracking } from 'helpers/tracking_helper'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; +import { __ } from '~/locale'; import WorkItemWeight from '~/work_items/components/work_item_weight.vue'; +import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants'; +import localUpdateWorkItemMutation from '~/work_items/graphql/local_update_work_item.mutation.graphql'; -describe('WorkItemAssignees component', () => { +describe('WorkItemWeight component', () => { let wrapper; - const createComponent = ({ weight, hasIssueWeightsFeature = true } = {}) => { - wrapper = shallowMount(WorkItemWeight, { + const mutateSpy = jest.fn(); + const workItemId = 'gid://gitlab/WorkItem/1'; + const workItemType = 'Task'; + + const findForm = () => wrapper.findComponent(GlForm); + const findInput = () => wrapper.findComponent(GlFormInput); + + const createComponent = ({ + canUpdate = false, + hasIssueWeightsFeature = true, + isEditing = false, + weight, + } = {}) => { + wrapper = mountExtended(WorkItemWeight, { propsData: { + canUpdate, weight, + workItemId, + workItemType, }, provide: { hasIssueWeightsFeature, }, + mocks: { + $apollo: { + mutate: mutateSpy, + }, + }, }); + + if (isEditing) { + findInput().vm.$emit('focus'); + } }; - describe('weight licensed feature', () => { + describe('`issue_weights` licensed feature', () => { describe.each` description | hasIssueWeightsFeature | exists ${'when available'} | ${true} | ${true} @@ -24,23 +54,111 @@ describe('WorkItemAssignees component', () => { it(hasIssueWeightsFeature ? 
'renders component' : 'does not render component', () => { createComponent({ hasIssueWeightsFeature }); - expect(wrapper.find('div').exists()).toBe(exists); + expect(findForm().exists()).toBe(exists); }); }); }); - describe('weight text', () => { - describe.each` - description | weight | text - ${'renders 1'} | ${1} | ${'1'} - ${'renders 0'} | ${0} | ${'0'} - ${'renders None'} | ${null} | ${'None'} - ${'renders None'} | ${undefined} | ${'None'} - `('when weight is $weight', ({ description, weight, text }) => { - it(description, () => { - createComponent({ weight }); + describe('weight input', () => { + it('has "Weight" label', () => { + createComponent(); - expect(wrapper.text()).toContain(text); + expect(wrapper.findByLabelText(__('Weight')).exists()).toBe(true); + }); + + describe('placeholder attribute', () => { + describe.each` + description | isEditing | canUpdate | value + ${'when not editing and cannot update'} | ${false} | ${false} | ${__('None')} + ${'when editing and cannot update'} | ${true} | ${false} | ${__('None')} + ${'when not editing and can update'} | ${false} | ${true} | ${__('None')} + ${'when editing and can update'} | ${true} | ${true} | ${__('Enter a number')} + `('$description', ({ isEditing, canUpdate, value }) => { + it(`has a value of "${value}"`, async () => { + createComponent({ canUpdate, isEditing }); + await nextTick(); + + expect(findInput().attributes('placeholder')).toBe(value); + }); + }); + }); + + describe('readonly attribute', () => { + describe.each` + description | canUpdate | value + ${'when cannot update'} | ${false} | ${'readonly'} + ${'when can update'} | ${true} | ${undefined} + `('$description', ({ canUpdate, value }) => { + it(`renders readonly=${value}`, () => { + createComponent({ canUpdate }); + + expect(findInput().attributes('readonly')).toBe(value); + }); + }); + }); + + describe('type attribute', () => { + describe.each` + description | isEditing | canUpdate | type + ${'when not editing and cannot update'} | 
${false} | ${false} | ${'text'} + ${'when editing and cannot update'} | ${true} | ${false} | ${'text'} + ${'when not editing and can update'} | ${false} | ${true} | ${'text'} + ${'when editing and can update'} | ${true} | ${true} | ${'number'} + `('$description', ({ isEditing, canUpdate, type }) => { + it(`has a value of "${type}"`, async () => { + createComponent({ canUpdate, isEditing }); + await nextTick(); + + expect(findInput().attributes('type')).toBe(type); + }); + }); + }); + + describe('value attribute', () => { + describe.each` + weight | value + ${1} | ${'1'} + ${0} | ${'0'} + ${null} | ${''} + ${undefined} | ${''} + `('when `weight` prop is "$weight"', ({ weight, value }) => { + it(`value is "${value}"`, () => { + createComponent({ weight }); + + expect(findInput().element.value).toBe(value); + }); + }); + }); + + describe('when blurred', () => { + it('calls a mutation to update the weight', () => { + const weight = 0; + createComponent({ isEditing: true, weight }); + + findInput().trigger('blur'); + + expect(mutateSpy).toHaveBeenCalledWith({ + mutation: localUpdateWorkItemMutation, + variables: { + input: { + id: workItemId, + weight, + }, + }, + }); + }); + + it('tracks updating the weight', () => { + const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn); + createComponent(); + + findInput().trigger('blur'); + + expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'updated_weight', { + category: TRACKING_CATEGORY_SHOW, + label: 'item_weight', + property: 'type_Task', + }); }); }); }); diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb index 34f4bdde3b5..28557aab830 100644 --- a/spec/lib/gitlab/diff/file_spec.rb +++ b/spec/lib/gitlab/diff/file_spec.rb @@ -129,6 +129,14 @@ RSpec.describe Gitlab::Diff::File do expect(diff_file.rendered).to be_kind_of(Gitlab::Diff::Rendered::Notebook::DiffFile) end + context 'when collapsed' do + it 'is nil' do + expect(diff).to 
receive(:collapsed?).and_return(true) + + expect(diff_file.rendered).to be_nil + end + end + context 'when too large' do it 'is nil' do expect(diff).to receive(:too_large?).and_return(true) diff --git a/spec/models/project_export_job_spec.rb b/spec/models/project_export_job_spec.rb index 5a2b1443f8b..653d4d2df27 100644 --- a/spec/models/project_export_job_spec.rb +++ b/spec/models/project_export_job_spec.rb @@ -3,17 +3,14 @@ require 'spec_helper' RSpec.describe ProjectExportJob, type: :model do - let(:project) { create(:project) } - let!(:job1) { create(:project_export_job, project: project, status: 0) } - let!(:job2) { create(:project_export_job, project: project, status: 2) } - describe 'associations' do - it { expect(job1).to belong_to(:project) } + it { is_expected.to belong_to(:project) } + it { is_expected.to have_many(:relation_exports) } end describe 'validations' do - it { expect(job1).to validate_presence_of(:project) } - it { expect(job1).to validate_presence_of(:jid) } - it { expect(job1).to validate_presence_of(:status) } + it { is_expected.to validate_presence_of(:project) } + it { is_expected.to validate_presence_of(:jid) } + it { is_expected.to validate_presence_of(:status) } end end diff --git a/spec/models/projects/import_export/relation_export_spec.rb b/spec/models/projects/import_export/relation_export_spec.rb new file mode 100644 index 00000000000..c74ca82e161 --- /dev/null +++ b/spec/models/projects/import_export/relation_export_spec.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Projects::ImportExport::RelationExport, type: :model do + subject { create(:project_relation_export) } + + describe 'associations' do + it { is_expected.to belong_to(:project_export_job) } + it { is_expected.to have_one(:upload) } + end + + describe 'validations' do + it { is_expected.to validate_presence_of(:project_export_job) } + it { is_expected.to validate_presence_of(:relation) } + it { is_expected.to 
validate_uniqueness_of(:relation).scoped_to(:project_export_job_id) } + it { is_expected.to validate_presence_of(:status) } + it { is_expected.to validate_numericality_of(:status).only_integer } + it { is_expected.to validate_length_of(:relation).is_at_most(255) } + it { is_expected.to validate_length_of(:jid).is_at_most(255) } + it { is_expected.to validate_length_of(:export_error).is_at_most(300) } + end +end diff --git a/spec/models/projects/import_export/relation_export_upload_spec.rb b/spec/models/projects/import_export/relation_export_upload_spec.rb new file mode 100644 index 00000000000..c0014c5a14c --- /dev/null +++ b/spec/models/projects/import_export/relation_export_upload_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Projects::ImportExport::RelationExportUpload, type: :model do + subject { described_class.new(relation_export: project_relation_export) } + + let_it_be(:project_relation_export) { create(:project_relation_export) } + + describe 'associations' do + it { is_expected.to belong_to(:relation_export) } + end + + it 'stores export file' do + stub_uploads_object_storage(ImportExportUploader, enabled: false) + + filename = 'labels.tar.gz' + subject.export_file = fixture_file_upload("spec/fixtures/gitlab/import_export/#{filename}") + + subject.save!
+ + url = "/uploads/-/system/projects/import_export/relation_export_upload/export_file/#{subject.id}/#{filename}" + expect(subject.export_file.url).to eq(url) + end +end diff --git a/spec/support/matchers/event_store.rb b/spec/support/matchers/event_store.rb index eb5b37f39e5..14f6a42d7f4 100644 --- a/spec/support/matchers/event_store.rb +++ b/spec/support/matchers/event_store.rb @@ -1,6 +1,8 @@ # frozen_string_literal: true RSpec::Matchers.define :publish_event do |expected_event_class| + include RSpec::Matchers::Composable + supports_block_expectations match do |proc| @@ -15,10 +17,17 @@ RSpec::Matchers.define :publish_event do |expected_event_class| proc.call @events.any? do |event| - event.instance_of?(expected_event_class) && event.data == @expected_data + event.instance_of?(expected_event_class) && match_data?(event.data, @expected_data) end end + def match_data?(actual, expected) + values_match?(actual.keys, expected.keys) && + actual.keys.each do |key| + values_match?(actual[key], expected[key]) + end + end + chain :with do |expected_data| @expected_data = expected_data end