Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-09-16 06:07:38 +00:00
parent c711e6ae73
commit e4969e64d2
28 changed files with 246 additions and 53 deletions

View File

@@ -185,7 +185,6 @@ gdk-qa-non-blocking:
variables:
QA_SCENARIO: Test::Instance::NonBlocking
QA_RUN_TYPE: gdk-qa-non-blocking
parallel: 5
allow_failure: true
artifacts:
paths:

View File

@@ -1 +1 @@
63a9bf2c6fc43278a115463153a965b521224a02
6a0281c68969d9ce8f36fdaf242b4f6e0503d940

View File

@@ -7,6 +7,8 @@ module Ci
MAX_JOB_NAME_LENGTH = 255
before_validation :set_project_id, on: :create
belongs_to :build,
->(need) { in_partition(need) },
class_name: 'Ci::Processable',
@@ -19,11 +21,19 @@ module Ci
validates :build, presence: true
validates :name, presence: true, length: { maximum: MAX_JOB_NAME_LENGTH }
validates :optional, inclusion: { in: [true, false] }
validates :project_id, presence: true, on: :create
scope :scoped_build, -> {
where(arel_table[:build_id].eq(Ci::Build.arel_table[:id]))
.where(arel_table[:partition_id].eq(Ci::Build.arel_table[:partition_id]))
}
scope :artifacts, -> { where(artifacts: true) }
# TODO: This is temporary code to assist the backfilling of records for this epic: https://gitlab.com/groups/gitlab-org/-/epics/12323
# To be removed in 17.7: https://gitlab.com/gitlab-org/gitlab/-/issues/488163
#
def set_project_id
self.project_id ||= build&.project_id
end
end
end
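
A console-style sketch of the new callback, assuming a seeded GitLab development environment (the build variable and job name are illustrative):

  build = Ci::Build.last
  need = Ci::BuildNeed.new(build: build, name: 'compile')
  # set_project_id runs before validation on create and only fills
  # project_id when it is blank, copying it from the associated build:
  need.valid?
  need.project_id == build.project_id # => true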

View File

@@ -0,0 +1,10 @@
---
migration_job_name: BackfillCiBuildNeedsProjectId
description: Backfills sharding key `ci_build_needs.project_id` from `p_ci_builds`.
feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163429
milestone: '17.4'
queued_migration_version: 20240827095907
# Replace with the approximate date by which this BBM is expected to be complete.
finalize_after: '2024-09-25'
finalized_by: # version of the migration that finalized this BBM

View File

@@ -8,4 +8,14 @@ description: Dependencies for a specific CI/CD job.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/31328
milestone: '12.2'
gitlab_schema: gitlab_ci
sharding_key_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/458479
desired_sharding_key:
project_id:
references: projects
backfill_via:
parent:
foreign_key: build_id
table: p_ci_builds
sharding_key: project_id
belongs_to: build
foreign_key_name: fk_rails_3cf221d4ed_p
desired_sharding_key_migration_job_name: BackfillCiBuildNeedsProjectId

View File

@@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddProjectIdToCiBuildNeeds < Gitlab::Database::Migration[2.2]
milestone '17.4'
def change
add_column :ci_build_needs, :project_id, :bigint
end
end

View File

@@ -0,0 +1,61 @@
# frozen_string_literal: true
class QueueBackfillCiBuildNeedsProjectId < Gitlab::Database::Migration[2.2]
milestone '17.4'
# Select the applicable gitlab schema for your batched background migration
restrict_gitlab_migration gitlab_schema: :gitlab_ci
MIGRATION = "BackfillCiBuildNeedsProjectId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 25000
SUB_BATCH_SIZE = 150
GITLAB_OPTIMIZED_BATCH_SIZE = 75_000
GITLAB_OPTIMIZED_SUB_BATCH_SIZE = 250
def up
queue_batched_background_migration(
MIGRATION,
:ci_build_needs,
:id,
:project_id,
:p_ci_builds,
:project_id,
:build_id,
:partition_id,
job_interval: DELAY_INTERVAL,
**batch_sizes
)
end
def down
delete_batched_background_migration(
MIGRATION,
:ci_build_needs,
:id,
[
:project_id,
:p_ci_builds,
:project_id,
:build_id,
:partition_id
]
)
end
private
def batch_sizes
if Gitlab.com_except_jh?
{
batch_size: GITLAB_OPTIMIZED_BATCH_SIZE,
sub_batch_size: GITLAB_OPTIMIZED_SUB_BATCH_SIZE
}
else
{
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
}
end
end
end
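
Read positionally, the job arguments above line up with the named parameters that the backfill job spec later in this commit binds, roughly:

  # :project_id   -> backfill column on ci_build_needs
  # :p_ci_builds  -> table backfilled via
  # :project_id   -> source column on that table
  # :build_id     -> foreign key joining the two tables
  # :partition_id -> partition column used for the join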

View File

@@ -0,0 +1,16 @@
# frozen_string_literal: true
class PrepareAsyncIndexToProjectIdInCiBuildNeeds < Gitlab::Database::Migration[2.2]
milestone '17.4'
TABLE_NAME = :ci_build_needs
INDEX_NAME = :index_ci_build_needs_on_project_id
def up
prepare_async_index TABLE_NAME, :project_id, name: INDEX_NAME
end
def down
unprepare_async_index TABLE_NAME, :project_id, name: INDEX_NAME
end
end
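
prepare_async_index only registers the index so it can be built out of band; once it exists, a follow-up migration normally adds it synchronously. A sketch of that follow-up, with a hypothetical class name and milestone:

  class AddProjectIdIndexToCiBuildNeeds < Gitlab::Database::Migration[2.2]
    milestone '17.5' # hypothetical
    disable_ddl_transaction!
    INDEX_NAME = :index_ci_build_needs_on_project_id
    def up
      add_concurrent_index :ci_build_needs, :project_id, name: INDEX_NAME
    end
    def down
      remove_concurrent_index_by_name :ci_build_needs, INDEX_NAME
    end
  end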

View File

@@ -0,0 +1 @@
6066e45bd25fb0a66d7bc31565a3e1703fbd6924568e1b89ebf4f60544f6b990

View File

@@ -0,0 +1 @@
6fa109a94e4595b189b371fa6b828b19c1458109cca9c30a7d39ea68b4deab8c

View File

@@ -0,0 +1 @@
a27b572726f2347cfa85d1ab75acefddf5c94d7e3e2d226331a548a6d2c4c651

View File

@@ -7774,7 +7774,8 @@ CREATE TABLE ci_build_needs (
optional boolean DEFAULT false NOT NULL,
build_id bigint NOT NULL,
partition_id bigint NOT NULL,
id bigint NOT NULL
id bigint NOT NULL,
project_id bigint
);
CREATE SEQUENCE ci_build_needs_id_seq

View File

@@ -0,0 +1,10 @@
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
class BackfillCiBuildNeedsProjectId < BackfillDesiredShardingKeyPartitionJob
operation_name :backfill_ci_build_needs_project_id
feature_category :continuous_integration
end
end
end
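
Given the desired_sharding_key entry earlier in this commit, the inherited job should amount to an update along these lines, applied in sub-batches by the batched migration framework (a sketch, not the exact SQL it generates):

  # Roughly equivalent single-statement form of the backfill:
  Ci::ApplicationRecord.connection.execute(<<~SQL)
    UPDATE ci_build_needs
    SET project_id = p_ci_builds.project_id
    FROM p_ci_builds
    WHERE ci_build_needs.build_id = p_ci_builds.id
      AND ci_build_needs.partition_id = p_ci_builds.partition_id
      AND ci_build_needs.project_id IS NULL
  SQL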

View File

@@ -13,6 +13,7 @@ RSpec.describe 'Database schema', feature_category: :database do
ai_testing_terms_acceptances: %w[user_id], # testing terms only have 1 entry, and if the user is deleted the record should remain
ci_build_trace_metadata: [%w[partition_id build_id], %w[partition_id trace_artifact_id]], # the index on build_id is enough
ci_builds: [%w[partition_id stage_id], %w[partition_id execution_config_id], %w[auto_canceled_by_partition_id auto_canceled_by_id], %w[upstream_pipeline_partition_id upstream_pipeline_id], %w[partition_id commit_id]], # https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142804#note_1745483081
ci_build_needs: %w[project_id], # we will create async index, see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/163429#note_2065627176
ci_daily_build_group_report_results: [%w[partition_id last_pipeline_id]], # index on last_pipeline_id is sufficient
ci_pipeline_artifacts: [%w[partition_id pipeline_id]], # index on pipeline_id is sufficient
ci_pipeline_chat_data: [%w[partition_id pipeline_id]], # index on pipeline_id is sufficient
@@ -75,6 +76,7 @@ RSpec.describe 'Database schema', feature_category: :database do
chat_teams: %w[team_id],
ci_builds: %w[project_id runner_id user_id erased_by_id trigger_request_id partition_id auto_canceled_by_partition_id execution_config_id upstream_pipeline_partition_id],
ci_builds_metadata: %w[partition_id project_id build_id],
ci_build_needs: %w[project_id],
ci_daily_build_group_report_results: %w[partition_id],
ci_deleted_objects: %w[project_id],
ci_job_artifacts: %w[partition_id project_id job_id],

View File

@@ -4,5 +4,6 @@ FactoryBot.define do
factory :ci_build_need, class: 'Ci::BuildNeed' do
build factory: :ci_build, scheduling_type: :dag
sequence(:name) { |n| "build_#{n}" }
project_id { build.project.id }
end
end
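
With the factory populating project_id from the associated build, existing specs satisfy the new presence validation without extra setup (console-style sketch):

  need = create(:ci_build_need)
  need.project_id == need.build.project_id # => true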

View File

@@ -0,0 +1,14 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillCiBuildNeedsProjectId, feature_category: :continuous_integration, migration: :gitlab_ci, schema: 20240821043019 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :ci_build_needs }
let(:backfill_column) { :project_id }
let(:backfill_via_table) { :p_ci_builds }
let(:backfill_via_column) { :project_id }
let(:backfill_via_foreign_key) { :build_id }
let(:partition_column) { :partition_id }
end
end

View File

@@ -0,0 +1,66 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillCiBuildNeedsProjectId, migration: :gitlab_ci, feature_category: :continuous_integration do
let!(:batched_migration) { described_class::MIGRATION }
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :ci_build_needs,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE,
gitlab_schema: :gitlab_ci,
job_arguments: [
:project_id,
:p_ci_builds,
:project_id,
:build_id,
:partition_id
]
)
}
end
end
context 'when executed on .com' do
before do
allow(Gitlab).to receive(:com_except_jh?).and_return(true)
end
it 'schedules a new batched migration' do
reversible_migration do |migration|
migration.before -> {
expect(batched_migration).not_to have_scheduled_batched_migration
}
migration.after -> {
expect(batched_migration).to have_scheduled_batched_migration(
table_name: :ci_build_needs,
column_name: :id,
interval: described_class::DELAY_INTERVAL,
batch_size: described_class::GITLAB_OPTIMIZED_BATCH_SIZE,
sub_batch_size: described_class::GITLAB_OPTIMIZED_SUB_BATCH_SIZE,
gitlab_schema: :gitlab_ci,
job_arguments: [
:project_id,
:p_ci_builds,
:project_id,
:build_id,
:partition_id
]
)
}
end
end
end
end

View File

@@ -9,6 +9,7 @@ RSpec.describe Ci::BuildNeed, model: true, feature_category: :continuous_integra
it { is_expected.to validate_presence_of(:build) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:project_id) }
it { is_expected.to validate_length_of(:name).is_at_most(255) }
describe 'scopes' do
@@ -65,7 +66,7 @@ RSpec.describe Ci::BuildNeed, model: true, feature_category: :continuous_integra
end
context 'without build' do
let(:build_need) { FactoryBot.build(:ci_build_need, build: nil) }
let(:build_need) { FactoryBot.build(:ci_build_need, build: nil, project_id: nil) }
it { is_expected.to validate_presence_of(:partition_id) }

View File

@@ -441,8 +441,8 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
it 'returns all needs attributes' do
is_expected.to contain_exactly(
{ 'artifacts' => true, 'name' => 'test1', 'optional' => false, 'partition_id' => build.partition_id },
{ 'artifacts' => true, 'name' => 'test2', 'optional' => false, 'partition_id' => build.partition_id }
{ 'artifacts' => true, 'name' => 'test1', 'optional' => false, 'partition_id' => build.partition_id, 'project_id' => build.project_id },
{ 'artifacts' => true, 'name' => 'test2', 'optional' => false, 'partition_id' => build.partition_id, 'project_id' => build.project_id }
)
end
end

View File

@@ -79,7 +79,6 @@ itself: # project
- web_url
- description_html
- repository_object_format
- pre_receive_secret_detection_enabled
build_auto_devops: # auto_devops
unexposed_attributes:

View File

@@ -8,24 +8,10 @@ cmd/gitlab-workhorse/main.go:9:2: G108: Profiling endpoint is automatically expo
cmd/gitlab-workhorse/main.go:73: Function 'buildConfig' has too many statements (63 > 40) (funlen)
cmd/gitlab-workhorse/main.go:79:14: Error return value of `fmt.Fprintf` is not checked (errcheck)
cmd/gitlab-workhorse/main.go:80:14: Error return value of `fmt.Fprintf` is not checked (errcheck)
cmd/gitlab-workhorse/main.go:120:15: Error return value of `fmt.Fprintln` is not checked (errcheck)
cmd/gitlab-workhorse/main.go:168: Function 'run' has too many statements (61 > 40) (funlen)
cmd/gitlab-workhorse/main.go:173:20: Error return value of `closer.Close` is not checked (errcheck)
cmd/gitlab-workhorse/main.go:181:6: shadow: declaration of "err" shadows declaration at line 169 (govet)
cmd/gitlab-workhorse/main.go:193:6: shadow: declaration of "err" shadows declaration at line 169 (govet)
cmd/gitlab-workhorse/main.go:198:30: G114: Use of net/http serve function that has no support for setting timeouts (gosec)
cmd/gitlab-workhorse/main.go:203:6: shadow: declaration of "err" shadows declaration at line 169 (govet)
cmd/gitlab-workhorse/main.go:212:6: shadow: declaration of "err" shadows declaration at line 169 (govet)
cmd/gitlab-workhorse/main.go:233:5: shadow: declaration of "err" shadows declaration at line 169 (govet)
cmd/gitlab-workhorse/main.go:241:26: Error return value of `accessCloser.Close` is not checked (errcheck)
cmd/gitlab-workhorse/main.go:265:10: G112: Potential Slowloris Attack because ReadHeaderTimeout is not configured in the http.Server (gosec)
cmd/gitlab-workhorse/main_test.go:60:2: exitAfterDefer: os.Exit will exit, and `defer gitaly.CloseConnections()` will not run (gocritic)
cmd/gitlab-workhorse/proxy_test.go:55:9: shadow: declaration of "err" shadows declaration at line 36 (govet)
cmd/gitlab-workhorse/proxy_test.go:77:6: var-naming: var tsUrl should be tsURL (revive)
cmd/gitlab-workhorse/proxy_test.go:87:6: shadow: declaration of "err" shadows declaration at line 78 (govet)
cmd/gitlab-workhorse/raven.go:17:14: Error return value of `raven.SetDSN` is not checked (errcheck)
cmd/gitlab-workhorse/upload_test.go:372:4: require-error: for error assertions use require (testifylint)
cmd/gitlab-workhorse/upload_test.go:377:4: require-error: for error assertions use require (testifylint)
cmd/gitlab-zip-cat/main.go:1:1: package-comments: should have a package comment (revive)
cmd/gitlab-zip-cat/main.go:19:5: exported: exported var Version should have comment or be unexported (revive)
cmd/gitlab-zip-cat/main.go:66:20: Error return value of `reader.Close` is not checked (errcheck)
@@ -189,17 +175,12 @@ internal/upload/destination/destination.go:72: internal/upload/destination/desti
internal/upload/destination/destination.go:117: Function 'Upload' has too many statements (49 > 40) (funlen)
internal/upload/destination/multi_hash.go:4:2: G501: Blocklisted import crypto/md5: weak cryptographic primitive (gosec)
internal/upload/destination/multi_hash.go:5:2: G505: Blocklisted import crypto/sha1: weak cryptographic primitive (gosec)
internal/upload/destination/objectstore/object_test.go:127:4: go-require: do not use assert.FailNow in http handlers (testifylint)
internal/upload/destination/objectstore/test/objectstore_stub.go:4:2: G501: Blocklisted import crypto/md5: weak cryptographic primitive (gosec)
internal/upload/destination/objectstore/test/objectstore_stub.go:169:13: G401: Use of weak cryptographic primitive (gosec)
internal/upload/destination/objectstore/upload_strategy.go:29: internal/upload/destination/objectstore/upload_strategy.go:29: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "TODO: consider adding the context to the..." (godox)
internal/upload/destination/objectstore/uploader.go:5:2: G501: Blocklisted import crypto/md5: weak cryptographic primitive (gosec)
internal/upload/destination/objectstore/uploader.go:95:12: G401: Use of weak cryptographic primitive (gosec)
internal/upload/exif/exif.go:103:10: G204: Subprocess launched with variable (gosec)
internal/upload/uploads.go:62:16: Error return value of `fmt.Fprintln` is not checked (errcheck)
internal/upload/uploads.go:101:15: Error return value of `fmt.Fprintln` is not checked (errcheck)
internal/upload/uploads_test.go:527:3: negative-positive: use assert.Positive (testifylint)
internal/upload/uploads_test.go:545:3: negative-positive: use assert.Positive (testifylint)
internal/upstream/routes.go:170:74: `(*upstream).wsRoute` - `matchers` always receives `nil` (unparam)
internal/upstream/routes.go:230: Function 'configureRoutes' is too long (333 > 60) (funlen)
internal/upstream/routes.go:479: internal/upstream/routes.go:479: Line contains TODO/BUG/FIXME/NOTE/OPTIMIZE/HACK: "TODO: We should probably not return a HT..." (godox)

View File

@@ -117,7 +117,7 @@ func buildConfig(arg0 string, args []string) (*bootConfig, *config.Config, error
}
if fset.NArg() > 0 {
err := alreadyPrintedError{fmt.Errorf("unexpected arguments: %v", fset.Args())}
fmt.Fprintln(fset.Output(), err)
_, _ = fmt.Fprintln(fset.Output(), err)
fset.Usage()
return nil, nil, err
}
@@ -170,7 +170,7 @@ func run(boot bootConfig, cfg config.Config) error {
if err != nil {
return err
}
defer closer.Close()
defer closer.Close() //nolint:errcheck
tracing.Initialize(tracing.WithServiceName("gitlab-workhorse"))
log.WithField("version", Version).WithField("build_time", BuildTime).Print("Starting")
@@ -178,7 +178,7 @@ func run(boot bootConfig, cfg config.Config) error {
// Good housekeeping for Unix sockets: unlink before binding
if boot.listenNetwork == "unix" {
if err := os.Remove(boot.listenAddr); err != nil && !os.IsNotExist(err) {
if err = os.Remove(boot.listenAddr); err != nil && !os.IsNotExist(err) {
return err
}
}
@@ -190,9 +190,9 @@ func run(boot bootConfig, cfg config.Config) error {
// having no profiler HTTP listener by default, the profiler is
// effectively disabled by default.
if boot.pprofListenAddr != "" {
l, err := net.Listen("tcp", boot.pprofListenAddr)
if err != nil {
return fmt.Errorf("pprofListenAddr: %v", err)
l, tcpErr := net.Listen("tcp", boot.pprofListenAddr)
if tcpErr != nil {
return fmt.Errorf("pprofListenAddr: %v", tcpErr)
}
go func() { finalErrors <- http.Serve(l, nil) }()
@@ -200,16 +200,16 @@ func run(boot bootConfig, cfg config.Config) error {
monitoringOpts := []monitoring.Option{monitoring.WithBuildInformation(Version, BuildTime)}
if cfg.MetricsListener != nil {
l, err := newListener("metrics", *cfg.MetricsListener)
if err != nil {
return err
l, metricErr := newListener("metrics", *cfg.MetricsListener)
if metricErr != nil {
return metricErr
}
monitoringOpts = append(monitoringOpts, monitoring.WithListener(l))
}
go func() {
// Unlike http.Serve, which always returns a non-nil error,
// monitoring.Start may return nil in which case we should not shut down.
if err := monitoring.Start(monitoringOpts...); err != nil {
if err = monitoring.Start(monitoringOpts...); err != nil {
finalErrors <- err
}
}()
@@ -230,7 +230,7 @@ func run(boot bootConfig, cfg config.Config) error {
watchKeyFn := redisKeyWatcher.WatchKey
if err := cfg.RegisterGoCloudURLOpeners(); err != nil {
if err = cfg.RegisterGoCloudURLOpeners(); err != nil {
return fmt.Errorf("register cloud credentials: %v", err)
}
@@ -238,7 +238,7 @@ func run(boot bootConfig, cfg config.Config) error {
if err != nil {
return fmt.Errorf("configure access logger: %v", err)
}
defer accessCloser.Close()
defer accessCloser.Close() //nolint:errcheck
gitaly.InitializeSidechannelRegistry(accessLogger)

View File

@@ -52,8 +52,8 @@ func TestProxyRequest(t *testing.T) {
"expect Gitlab-Workhorse-Proxy-Start to start with 1",
)
body, err := io.ReadAll(r.Body)
assert.NoError(t, err, "read body")
body, requestErr := io.ReadAll(r.Body)
assert.NoError(t, requestErr, "read body")
assert.Equal(t, "REQUEST", string(body), "body contents")
w.Header().Set("Custom-Response-Header", "test")
@@ -74,20 +74,20 @@ }
}
func TestProxyWithForcedTargetHostHeader(t *testing.T) {
var tsUrl *url.URL
var tsURL *url.URL
inboundURL, err := url.Parse("https://explicitly.set.host/url/path")
require.NoError(t, err, "parse upstream url")
urlRegexp := regexp.MustCompile(fmt.Sprintf(`%s\z`, inboundURL.Path))
ts := testhelper.TestServerWithHandler(urlRegexp, func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, tsUrl.Host, r.Host, "upstream host header")
assert.Equal(t, tsURL.Host, r.Host, "upstream host header")
assert.Equal(t, inboundURL.Host, r.Header.Get("X-Forwarded-Host"), "X-Forwarded-Host header")
assert.Equal(t, fmt.Sprintf("host=%s", inboundURL.Host), r.Header.Get("Forwarded"), "Forwarded header")
_, err := w.Write([]byte(`ok`))
_, err = w.Write([]byte(`ok`))
assert.NoError(t, err, "write ok response")
})
tsUrl, err = url.Parse(ts.URL)
tsURL, err = url.Parse(ts.URL)
require.NoError(t, err, "parse testserver URL")
httpRequest, err := http.NewRequest("POST", inboundURL.String(), nil)

View File

@@ -14,7 +14,7 @@ func wrapRaven(h http.Handler) http.Handler {
// clashes with gitlab-rails.
sentryDSN := os.Getenv("GITLAB_WORKHORSE_SENTRY_DSN")
sentryEnvironment := os.Getenv("GITLAB_WORKHORSE_SENTRY_ENVIRONMENT")
raven.SetDSN(sentryDSN) // sentryDSN may be empty
_ = raven.SetDSN(sentryDSN) // sentryDSN may be empty
if sentryEnvironment != "" {
raven.SetEnvironment(sentryEnvironment)

View File

@@ -369,12 +369,12 @@ func TestBlockingRewrittenFieldsHeader(t *testing.T) {
ws := startWorkhorseServer(t, ts.URL)
req, err := http.NewRequest("POST", ws.URL+"/something", tc.body)
assert.NoError(t, err)
require.NoError(t, err)
req.Header.Set("Content-Type", tc.contentType)
req.Header.Set(upload.RewrittenFieldsHeader, canary)
resp, err := http.DefaultClient.Do(req)
assert.NoError(t, err)
require.NoError(t, err)
defer resp.Body.Close()
assert.Equal(t, 200, resp.StatusCode, "status code")

View File

@@ -124,7 +124,7 @@ func TestObjectUploadBrokenConnection(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
hj, ok := w.(http.Hijacker)
if !ok {
assert.FailNow(t, "webserver doesn't support hijacking")
assert.Fail(t, "webserver doesn't support hijacking")
}
conn, _, err := hj.Hijack()
if err != nil {

View File

@@ -59,7 +59,7 @@ func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Hand
writer := multipart.NewWriter(&body)
defer func() {
if writerErr := writer.Close(); writerErr != nil {
fmt.Fprintln(w, writerErr.Error())
_, _ = fmt.Fprintln(w, writerErr.Error())
}
}()
@@ -98,7 +98,7 @@ func interceptMultipartFiles(w http.ResponseWriter, r *http.Request, h http.Hand
// Close writer
if writerErr := writer.Close(); writerErr != nil {
fmt.Fprintln(w, writerErr.Error())
_, _ = fmt.Fprintln(w, writerErr.Error())
}
// Hijack the request

View File

@@ -524,7 +524,7 @@ func TestUploadHandlerRemovingExif(t *testing.T) {
size, err := strconv.Atoi(r.FormValue("file.size"))
assert.NoError(t, err)
assert.Less(t, size, len(content), "Expected the file to be smaller after removal of exif")
assert.Greater(t, size, 0, "Expected to receive non-empty file")
assert.Positive(t, size, "Expected to receive non-empty file")
w.WriteHeader(200)
fmt.Fprint(w, "RESPONSE")
@@ -542,7 +542,7 @@ func TestUploadHandlerRemovingExifTiff(t *testing.T) {
size, err := strconv.Atoi(r.FormValue("file.size"))
assert.NoError(t, err)
assert.Less(t, size, len(content), "Expected the file to be smaller after removal of exif")
assert.Greater(t, size, 0, "Expected to receive not empty file")
assert.Positive(t, size, "Expected to receive non-empty file")
w.WriteHeader(200)
fmt.Fprint(w, "RESPONSE")