Merge branch 'main' into e2e-teardown

This commit is contained in:
Prasanth Baskar 2025-07-19 13:16:36 +05:30 committed by GitHub
commit 9e9a5c24bc
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
41 changed files with 2072 additions and 90 deletions

View File

@ -93,7 +93,12 @@ VERSIONTAG=dev
BUILD_BASE=true
PUSHBASEIMAGE=false
BASEIMAGETAG=dev
BUILDBASETARGET=trivy-adapter core db jobservice log nginx portal prepare redis registry registryctl exporter
# skip building the prepare and log containers when BUILD_INSTALLER=false
BUILD_INSTALLER=true
BUILDBASETARGET=trivy-adapter core db jobservice nginx portal redis registry registryctl exporter
ifeq ($(BUILD_INSTALLER), true)
BUILDBASETARGET += prepare log
endif
IMAGENAMESPACE=goharbor
BASEIMAGENAMESPACE=goharbor
# #input true/false only
@ -130,6 +135,7 @@ endef
# docker parameters
DOCKERCMD=$(shell which docker)
DOCKERBUILD=$(DOCKERCMD) build
DOCKERNETWORK=default
DOCKERRMIMAGE=$(DOCKERCMD) rmi
DOCKERPULL=$(DOCKERCMD) pull
DOCKERIMAGES=$(DOCKERCMD) images
@ -239,10 +245,8 @@ REGISTRYUSER=
REGISTRYPASSWORD=
# cmds
DOCKERSAVE_PARA=$(DOCKER_IMAGE_NAME_PREPARE):$(VERSIONTAG) \
$(DOCKERIMAGENAME_PORTAL):$(VERSIONTAG) \
DOCKERSAVE_PARA=$(DOCKERIMAGENAME_PORTAL):$(VERSIONTAG) \
$(DOCKERIMAGENAME_CORE):$(VERSIONTAG) \
$(DOCKERIMAGENAME_LOG):$(VERSIONTAG) \
$(DOCKERIMAGENAME_DB):$(VERSIONTAG) \
$(DOCKERIMAGENAME_JOBSERVICE):$(VERSIONTAG) \
$(DOCKERIMAGENAME_REGCTL):$(VERSIONTAG) \
@ -250,6 +254,18 @@ DOCKERSAVE_PARA=$(DOCKER_IMAGE_NAME_PREPARE):$(VERSIONTAG) \
$(IMAGENAMESPACE)/nginx-photon:$(VERSIONTAG) \
$(IMAGENAMESPACE)/registry-photon:$(VERSIONTAG)
ifeq ($(BUILD_INSTALLER), true)
DOCKERSAVE_PARA+= $(DOCKER_IMAGE_NAME_PREPARE):$(VERSIONTAG) \
$(DOCKERIMAGENAME_LOG):$(VERSIONTAG)
endif
ifeq ($(TRIVYFLAG), true)
DOCKERSAVE_PARA+= $(IMAGENAMESPACE)/trivy-adapter-photon:$(VERSIONTAG)
endif
ifeq ($(EXPORTERFLAG), true)
DOCKERSAVE_PARA+= $(DOCKERIMAGENAME_EXPORTER):$(VERSIONTAG)
endif
PACKAGE_OFFLINE_PARA=-zcvf harbor-offline-installer-$(PKGVERSIONTAG).tgz \
$(HARBORPKG)/$(DOCKERIMGFILE).$(VERSIONTAG).tar.gz \
$(HARBORPKG)/prepare \
@ -266,13 +282,6 @@ PACKAGE_ONLINE_PARA=-zcvf harbor-online-installer-$(PKGVERSIONTAG).tgz \
DOCKERCOMPOSE_FILE_OPT=-f $(DOCKERCOMPOSEFILEPATH)/$(DOCKERCOMPOSEFILENAME)
ifeq ($(TRIVYFLAG), true)
DOCKERSAVE_PARA+= $(IMAGENAMESPACE)/trivy-adapter-photon:$(VERSIONTAG)
endif
ifeq ($(EXPORTERFLAG), true)
DOCKERSAVE_PARA+= $(DOCKERIMAGENAME_EXPORTER):$(VERSIONTAG)
endif
RUNCONTAINER=$(DOCKERCMD) run --rm -u $(shell id -u):$(shell id -g) -v $(BUILDPATH):$(BUILDPATH) -w $(BUILDPATH)
# $1 the name of the docker image
@ -395,7 +404,9 @@ build:
-e REGISTRYVERSION=$(REGISTRYVERSION) -e REGISTRY_SRC_TAG=$(REGISTRY_SRC_TAG) -e DISTRIBUTION_SRC=$(DISTRIBUTION_SRC)\
-e TRIVYVERSION=$(TRIVYVERSION) -e TRIVYADAPTERVERSION=$(TRIVYADAPTERVERSION) \
-e VERSIONTAG=$(VERSIONTAG) \
-e DOCKERNETWORK=$(DOCKERNETWORK) \
-e BUILDREG=$(BUILDREG) -e BUILDTRIVYADP=$(BUILDTRIVYADP) \
-e BUILD_INSTALLER=$(BUILD_INSTALLER) \
-e NPM_REGISTRY=$(NPM_REGISTRY) -e BASEIMAGETAG=$(BASEIMAGETAG) -e IMAGENAMESPACE=$(IMAGENAMESPACE) -e BASEIMAGENAMESPACE=$(BASEIMAGENAMESPACE) \
-e REGISTRYURL=$(REGISTRYURL) \
-e TRIVY_DOWNLOAD_URL=$(TRIVY_DOWNLOAD_URL) -e TRIVY_ADAPTER_DOWNLOAD_URL=$(TRIVY_ADAPTER_DOWNLOAD_URL) \
@ -442,7 +453,14 @@ package_online: update_prepare_version
@rm -rf $(HARBORPKG)
@echo "Done."
package_offline: update_prepare_version compile build
# Guard target for package_offline: the offline installer package needs the
# prepare and log images, which are only built when BUILD_INSTALLER=true,
# so fail fast with a clear message instead of producing a broken package.
.PHONY: check_buildinstaller
check_buildinstaller:
@if [ "$(BUILD_INSTALLER)" != "true" ]; then \
echo "Must set BUILD_INSTALLER as true while triggering package_offline build" ; \
exit 1; \
fi
package_offline: check_buildinstaller update_prepare_version compile build
@echo "packing offline package ..."
@cp -r make $(HARBORPKG)

View File

@ -16,10 +16,9 @@ Patch releases are based on the major/minor release branch, the release cadence
### Minor Release Support Matrix
| Version | Supported |
|----------------| ------------------ |
| Harbor v2.13.x | :white_check_mark: |
| Harbor v2.12.x | :white_check_mark: |
| Harbor v2.11.x | :white_check_mark: |
| Harbor v2.10.x | :white_check_mark: |
### Upgrade path and support policy
The upgrade path for Harbor is (1) 2.2.x patch releases are always compatible with its major and minor versions. For example, previous released 2.2.x can be upgraded to most recent 2.2.3 release. (2) Harbor only supports two previous minor releases to upgrade to current minor release. For example, 2.3.0 will only support 2.1.0 and 2.2.0 to upgrade from, 2.0.0 to 2.3.0 is not supported. One should upgrade to 2.2.0 first, then to 2.3.0.

View File

@ -336,6 +336,8 @@ paths:
responses:
'200':
$ref: '#/responses/200'
'400':
$ref: '#/responses/400'
'404':
$ref: '#/responses/404'
'500':
@ -3560,6 +3562,8 @@ paths:
responses:
'200':
$ref: '#/responses/200'
'400':
$ref: '#/responses/400'
'401':
$ref: '#/responses/401'
'403':
@ -3998,6 +4002,8 @@ paths:
responses:
'200':
$ref: '#/responses/200'
'400':
$ref: '#/responses/400'
'401':
$ref: '#/responses/401'
'403':
@ -6138,6 +6144,7 @@ paths:
cve_id(exact match)
cvss_score_v3(range condition)
severity(exact match)
status(exact match)
repository_name(exact match)
project_id(exact match)
package(exact match)
@ -10066,6 +10073,9 @@ definitions:
severity:
type: string
description: the severity of the vulnerability
status:
type: string
description: the status of the vulnerability, e.g. "fixed" or "won't fix"
cvss_v3_score:
type: number
format: float

View File

@ -6,3 +6,4 @@ ALTER SEQUENCE permission_policy_id_seq AS BIGINT;
ALTER TABLE role_permission ALTER COLUMN permission_policy_id TYPE BIGINT;
ALTER TABLE vulnerability_record ADD COLUMN IF NOT EXISTS status text;

View File

@ -18,7 +18,7 @@ TIMESTAMP=$(shell date +"%Y%m%d")
# docker parameters
DOCKERCMD=$(shell which docker)
DOCKERBUILD=$(DOCKERCMD) build --no-cache
DOCKERBUILD=$(DOCKERCMD) build --no-cache --network=$(DOCKERNETWORK)
DOCKERBUILD_WITH_PULL_PARA=$(DOCKERBUILD) --pull=$(PULL_BASE_FROM_DOCKERHUB)
DOCKERRMIMAGE=$(DOCKERCMD) rmi
DOCKERIMAGES=$(DOCKERCMD) images
@ -154,7 +154,7 @@ _build_trivy_adapter:
$(call _extract_archive, $(TRIVY_ADAPTER_DOWNLOAD_URL), $(DOCKERFILEPATH_TRIVY_ADAPTER)/binary/) ; \
else \
echo "Building Trivy adapter $(TRIVYADAPTERVERSION) from sources..." ; \
cd $(DOCKERFILEPATH_TRIVY_ADAPTER) && $(DOCKERFILEPATH_TRIVY_ADAPTER)/builder.sh $(TRIVYADAPTERVERSION) $(GOBUILDIMAGE) && cd - ; \
cd $(DOCKERFILEPATH_TRIVY_ADAPTER) && $(DOCKERFILEPATH_TRIVY_ADAPTER)/builder.sh $(TRIVYADAPTERVERSION) $(GOBUILDIMAGE) $(DOCKERNETWORK) && cd - ; \
fi ; \
echo "Building Trivy adapter container for photon..." ; \
$(DOCKERBUILD_WITH_PULL_PARA) --build-arg harbor_base_image_version=$(BASEIMAGETAG) \
@ -178,7 +178,7 @@ _build_registry:
rm -rf $(DOCKERFILEPATH_REG)/binary && mkdir -p $(DOCKERFILEPATH_REG)/binary && \
$(call _get_binary, $(REGISTRYURL), $(DOCKERFILEPATH_REG)/binary/registry); \
else \
cd $(DOCKERFILEPATH_REG) && $(DOCKERFILEPATH_REG)/builder $(REGISTRY_SRC_TAG) $(DISTRIBUTION_SRC) $(GOBUILDIMAGE) && cd - ; \
cd $(DOCKERFILEPATH_REG) && $(DOCKERFILEPATH_REG)/builder $(REGISTRY_SRC_TAG) $(DISTRIBUTION_SRC) $(GOBUILDIMAGE) $(DOCKERNETWORK) && cd - ; \
fi
@echo "building registry container for photon..."
@chmod 655 $(DOCKERFILEPATH_REG)/binary/registry && $(DOCKERBUILD_WITH_PULL_PARA) --build-arg harbor_base_image_version=$(BASEIMAGETAG) --build-arg harbor_base_namespace=$(BASEIMAGENAMESPACE) -f $(DOCKERFILEPATH_REG)/$(DOCKERFILENAME_REG) -t $(DOCKERIMAGENAME_REG):$(VERSIONTAG) .
@ -233,10 +233,17 @@ define _build_base
fi
endef
build: _build_prepare _build_db _build_portal _build_core _build_jobservice _build_log _build_nginx _build_registry _build_registryctl _build_trivy_adapter _build_redis _compile_and_build_exporter
ifeq ($(BUILD_INSTALLER), true)
buildcompt: _build_prepare _build_db _build_portal _build_core _build_jobservice _build_log _build_nginx _build_registry _build_registryctl _build_trivy_adapter _build_redis _compile_and_build_exporter
else
buildcompt: _build_db _build_portal _build_core _build_jobservice _build_nginx _build_registry _build_registryctl _build_trivy_adapter _build_redis _compile_and_build_exporter
endif
build: buildcompt
@if [ -n "$(REGISTRYUSER)" ] && [ -n "$(REGISTRYPASSWORD)" ] ; then \
docker logout ; \
fi
cleanimage:
@echo "cleaning image for photon..."
- $(DOCKERRMIMAGE) -f $(DOCKERIMAGENAME_PORTAL):$(VERSIONTAG)

View File

@ -23,7 +23,6 @@ HTTPS_PROXY={{jobservice_https_proxy}}
NO_PROXY={{jobservice_no_proxy}}
REGISTRY_CREDENTIAL_USERNAME={{registry_username}}
REGISTRY_CREDENTIAL_PASSWORD={{registry_password}}
MAX_JOB_DURATION_SECONDS={{max_job_duration_seconds}}
{% if metric.enabled %}
METRIC_NAMESPACE=harbor

View File

@ -227,7 +227,6 @@ def parse_yaml_config(config_file_path, with_trivy):
value = config_dict["max_job_duration_hours"]
if not isinstance(value, int) or value < 24:
config_dict["max_job_duration_hours"] = 24
config_dict['max_job_duration_seconds'] = config_dict['max_job_duration_hours'] * 3600
config_dict['job_loggers'] = js_config["job_loggers"]
config_dict['logger_sweeper_duration'] = js_config["logger_sweeper_duration"]
config_dict['jobservice_secret'] = generate_random_string(16)

View File

@ -34,7 +34,6 @@ def prepare_job_service(config_dict):
internal_tls=config_dict['internal_tls'],
max_job_workers=config_dict['max_job_workers'],
max_job_duration_hours=config_dict['max_job_duration_hours'],
max_job_duration_seconds=config_dict['max_job_duration_seconds'],
job_loggers=config_dict['job_loggers'],
logger_sweeper_duration=config_dict['logger_sweeper_duration'],
redis_url=config_dict['redis_url_js'],

View File

@ -15,6 +15,7 @@ fi
VERSION="$1"
DISTRIBUTION_SRC="$2"
GOBUILDIMAGE="$3"
DOCKERNETWORK="$4"
set -e
@ -33,7 +34,7 @@ cd $cur
echo 'build the registry binary ...'
cp Dockerfile.binary $TEMP
docker build --build-arg golang_image=$GOBUILDIMAGE -f $TEMP/Dockerfile.binary -t registry-golang $TEMP
docker build --network=$DOCKERNETWORK --build-arg golang_image=$GOBUILDIMAGE -f $TEMP/Dockerfile.binary -t registry-golang $TEMP
echo 'copy the registry binary to local...'
ID=$(docker create registry-golang)

View File

@ -9,6 +9,7 @@ fi
VERSION="$1"
GOBUILDIMAGE="$2"
DOCKERNETWORK="$3"
set -e
@ -22,7 +23,7 @@ cd $TEMP; git checkout $VERSION; cd -
echo "Building Trivy adapter binary ..."
cp Dockerfile.binary $TEMP
docker build --build-arg golang_image=$GOBUILDIMAGE -f $TEMP/Dockerfile.binary -t trivy-adapter-golang $TEMP
docker build --network=$DOCKERNETWORK --build-arg golang_image=$GOBUILDIMAGE -f $TEMP/Dockerfile.binary -t trivy-adapter-golang $TEMP
echo "Copying Trivy adapter binary from the container to the local directory..."
ID=$(docker create trivy-adapter-golang)

View File

@ -42,6 +42,7 @@ const (
jobServiceRedisIdleConnTimeoutSecond = "JOB_SERVICE_POOL_REDIS_CONN_IDLE_TIMEOUT_SECOND"
jobServiceAuthSecret = "JOBSERVICE_SECRET"
coreURL = "CORE_URL"
maxJobDurationSeconds = "MAX_JOB_DURATION_SECONDS"
// JobServiceProtocolHTTPS points to the 'https' protocol
JobServiceProtocolHTTPS = "https"
@ -182,7 +183,19 @@ func (c *Configuration) Load(yamlFilePath string, detectEnv bool) error {
}
// Validate settings
return c.validate()
if err := c.validate(); err != nil {
return err
}
initMaxJobDurationEnv()
return nil
}
// initMaxJobDurationEnv exports MAX_JOB_DURATION_SECONDS for gocraft/work
// (it is used to expire the job service redis key). An already-present
// value in the environment is left untouched.
func initMaxJobDurationEnv() {
	if os.Getenv(maxJobDurationSeconds) != "" {
		return
	}
	seconds := MaxUpdateDuration().Seconds()
	os.Setenv(maxJobDurationSeconds, fmt.Sprintf("%v", seconds))
}
// GetAuthSecret get the auth secret from the env

View File

@ -68,6 +68,7 @@ type VulnerabilityRecord struct {
PackageVersion string `orm:"column(package_version)"`
PackageType string `orm:"column(package_type)"`
Severity string `orm:"column(severity)"`
Status string `orm:"column(status)"`
Fix string `orm:"column(fixed_version);null"`
URLs string `orm:"column(urls);null"`
CVE3Score *float64 `orm:"column(cvss_score_v3);null"`

View File

@ -60,7 +60,6 @@ func (c *nativeToRelationalSchemaConverter) ToRelationalSchema(ctx context.Conte
log.G(ctx).Infof("There is no vulnerability report to toSchema for report UUID : %s", reportUUID)
return reportUUID, "", nil
}
// parse the raw report with the V1 schema of the report to the normalized structures
rawReport := new(vuln.Report)
if err := json.Unmarshal([]byte(reportData), &rawReport); err != nil {
@ -136,15 +135,18 @@ func (c *nativeToRelationalSchemaConverter) toSchema(ctx context.Context, report
recordIDs = append(recordIDs, record.ID)
if record.Severity != v.Severity.String() {
if record.Severity != v.Severity.String() || record.Status != v.Status {
record.Status = v.Status
record.Severity = v.Severity.String()
record.CVE3Score = v.CVSSDetails.ScoreV3
record.Fix = v.FixVersion
outOfDateRecords = append(outOfDateRecords, record)
}
}
for _, record := range outOfDateRecords {
// Update the severity, fixed_version, cvss_score_v3, and status of the record when changed in the scanner, closes #14745 #21463
if err := c.dao.Update(ctx, record, "severity", "fixed_version", "cvss_score_v3"); err != nil {
if err := c.dao.Update(ctx, record, "severity", "fixed_version", "cvss_score_v3", "status"); err != nil {
return err
}
}
@ -247,6 +249,7 @@ func toVulnerabilityRecord(ctx context.Context, item *vuln.VulnerabilityItem, re
record.URLs = strings.Join(item.Links, "|")
record.RegistrationUUID = registrationUUID
record.Severity = item.Severity.String()
record.Status = item.Status
// process the CVSS scores if the data is available
if (vuln.CVSS{} != item.CVSSDetails) {
@ -294,6 +297,7 @@ func toVulnerabilityItem(record *scan.VulnerabilityRecord, artifactDigest string
urls := strings.Split(record.URLs, "|")
item.Links = append(item.Links, urls...)
item.Severity = vuln.ParseSeverityVersion3(record.Severity)
item.Status = record.Status
item.Package = record.Package
var vendorAttributes map[string]any
_ = json.Unmarshal([]byte(record.VendorAttributes), &vendorAttributes)

View File

@ -206,6 +206,8 @@ type VulnerabilityItem struct {
FixVersion string `json:"fix_version"`
// A standard scale for measuring the severity of a vulnerability.
Severity Severity `json:"severity"`
// The status of the vulnerability.
Status string `json:"status"`
// example: dpkg-source in dpkg 1.3.0 through 1.18.23 is able to use a non-GNU patch program
// and does not offer a protection mechanism for blank-indented diff hunks, which allows remote
// attackers to conduct directory traversal attacks via a crafted Debian source package, as

View File

@ -83,7 +83,7 @@ ORDER BY vr.cvss_score_v3 DESC, severity_level DESC
LIMIT 5`
// sql to query vulnerabilities
vulnerabilitySQL = `select vr.cve_id, vr.cvss_score_v3, vr.package, a.repository_name, a.id artifact_id, a.digest, vr.package, vr.package_version, vr.severity, vr.fixed_version, vr.description, vr.urls, a.project_id
vulnerabilitySQL = `select vr.cve_id, vr.cvss_score_v3, vr.package, a.repository_name, a.id artifact_id, a.digest, vr.package, vr.package_version, vr.severity, vr.status, vr.fixed_version, vr.description, vr.urls, a.project_id
from artifact a,
scan_report s,
report_vulnerability_record rvr,
@ -112,6 +112,7 @@ type filterMetaData struct {
var filterMap = map[string]*filterMetaData{
"cve_id": &filterMetaData{DataType: stringType},
"severity": &filterMetaData{DataType: stringType},
"status": &filterMetaData{DataType: stringType},
"cvss_score_v3": &filterMetaData{DataType: rangeType, FilterFunc: rangeFilter},
"project_id": &filterMetaData{DataType: stringType},
"repository_name": &filterMetaData{DataType: stringType},

View File

@ -1,25 +1,50 @@
![Harbor UI](https://raw.githubusercontent.com/goharbor/website/master/docs/img/readme/harbor_logo.png)
Harbor UI
============
This is the project based on Clarity and Angular to build Harbor UI.
# Harbor UI
This project is the web interface for [Harbor](https://goharbor.io), built using [Clarity Design System](https://clarity.design/) and Angular.
## Getting Started
Start
============
1. Use the specified Node version:
Run the following command to use the Node version specified in the .nvmrc file:
```bash
nvm install # Install the Node version specified in .nvmrc (if not already installed)
### 1. Use the correct Node version
To ensure compatibility with dependencies, use the Node version defined in `.nvmrc`.
```
nvm install # Install the Node version from .nvmrc (if not already installed)
nvm use # Switch to the specified Node version
```
This step helps avoid compatibility issues, especially with dependencies.
2. npm install (should trigger 'npm postinstall')
3. npm run postinstall (if not triggered, manually run this step)
4. copy "proxy.config.mjs.temp" file to "proxy.config.mjs"
`cp proxy.config.mjs.temp proxy.config.mjs`
5. Modify "proxy.config.mjs" to specify a Harbor server. And you can specify the agent if you work behind a corporate proxy
6. npm run start
7. open your browser on https://localhost:4200
### 2. Install dependencies
```
npm install
```
> Note: `npm install` should automatically trigger the `postinstall` script.
> If the `postinstall` script was not triggered, run it manually: `npm run postinstall`
### 3. Copy the template proxy file
```
cp proxy.config.mjs.temp proxy.config.mjs
```
### 4. Configure the proxy
Edit `proxy.config.mjs` to specify the Harbor server.
You can specify the agent if you work behind a corporate proxy.
### 5. Start the development server
```
npm run start
```
### 6. Open the application
Open your browser at https://localhost:4200

View File

@ -317,7 +317,7 @@
</clr-input-container>
<div class="clr-form-control">
<label for="disableAuditLogEventList" class="clr-control-label">
{{ 'CLEARANCES.DISABLE_AUDIT_LOG_EVENT_TYPE' | translate }}
{{ 'CLEARANCES.ENABLE_AUDIT_LOG_EVENT_TYPE' | translate }}
<clr-tooltip>
<clr-icon
clrTooltipTrigger
@ -328,7 +328,7 @@
clrSize="lg"
*clrIfOpen>
<span>{{
'CLEARANCES.DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP'
'CLEARANCES.ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP'
| translate
}}</span>
</clr-tooltip-content>
@ -337,7 +337,7 @@
<div *ngIf="logEventTypes.length === 0">
{{ 'CLEARANCES.AUDIT_LOG_EVENT_TYPE_EMPTY' | translate }}
</div>
<div class="clr-control-container">
<div class="clr-control-container audit-log-type">
<div
class="clr-checkbox-wrapper float-left"
*ngFor="let item of logEventTypes">

View File

@ -190,6 +190,10 @@ $message-type-width: 12rem;
margin-right: 0.25rem;
}
.audit-log-type {
max-width: 20rem;
}
.duration {
width: $input-width;
display: flex;

View File

@ -99,7 +99,7 @@ export class SystemSettingsComponent
HarborEvent.REFRESH_BANNER_MESSAGE,
() => {
this.setValueForBannerMessage();
this.setValueForDisabledAuditLogEventTypes();
this.setValueForEnabledAuditLogEventTypes();
}
);
}
@ -107,7 +107,7 @@ export class SystemSettingsComponent
this.setValueForBannerMessage();
}
this.initLogEventTypes();
this.setValueForDisabledAuditLogEventTypes();
this.setValueForEnabledAuditLogEventTypes();
}
ngAfterViewChecked() {
@ -137,6 +137,7 @@ export class SystemSettingsComponent
value: event.event_type,
id: event.event_type,
}));
this.setValueForEnabledAuditLogEventTypes();
},
error => {
this.errorHandler.error(error);
@ -144,11 +145,18 @@ export class SystemSettingsComponent
);
}
setValueForDisabledAuditLogEventTypes() {
const checkedEventTypes =
setValueForEnabledAuditLogEventTypes() {
const disabledEventTypes =
this.currentConfig?.disabled_audit_log_event_types?.value;
this.selectedLogEventTypes =
checkedEventTypes?.split(',')?.filter(evt => evt !== '') ?? [];
const disabledEvents =
disabledEventTypes?.split(',')?.filter(evt => evt !== '') ?? [];
const allEventTypes = this.logEventTypes.map(evt => evt.value);
// Enabled = All - Disabled
this.selectedLogEventTypes = allEventTypes.filter(
evt => !disabledEvents.includes(evt)
);
}
setValueForBannerMessage() {
@ -233,22 +241,27 @@ export class SystemSettingsComponent
}
hasLogEventType(resourceType: string): boolean {
return this.selectedLogEventTypes?.indexOf(resourceType) !== -1;
return this.selectedLogEventTypes?.includes(resourceType);
}
setLogEventType(resourceType: string) {
if (this.selectedLogEventTypes.indexOf(resourceType) === -1) {
this.selectedLogEventTypes.push(resourceType);
} else {
this.selectedLogEventTypes.splice(
this.selectedLogEventTypes.findIndex(
item => item === resourceType
),
1
if (this.selectedLogEventTypes.includes(resourceType)) {
this.selectedLogEventTypes = this.selectedLogEventTypes.filter(
evt => evt !== resourceType
);
} else {
this.selectedLogEventTypes.push(resourceType);
}
const allEventTypes = this.logEventTypes.map(evt => evt.value);
// Disabled = All - Enabled
const disabled = allEventTypes.filter(
evt => !this.selectedLogEventTypes.includes(evt)
);
// Update backend config
this.currentConfig.disabled_audit_log_event_types.value =
this.selectedLogEventTypes.join(',');
disabled.join(',');
}
public getChanges() {

View File

@ -28,6 +28,9 @@
<clr-dg-column>{{
'VULNERABILITY.GRID.COLUMN_SEVERITY' | translate
}}</clr-dg-column>
<clr-dg-column>{{
'VULNERABILITY.GRID.COLUMN_STATUS' | translate
}}</clr-dg-column>
<clr-dg-column class="min-width">{{
'VULNERABILITY.GRID.COLUMN_PACKAGE' | translate
}}</clr-dg-column>
@ -117,6 +120,7 @@
severityText(c.severity) | translate
}}</span>
</clr-dg-cell>
<clr-dg-cell>{{ c.status }}</clr-dg-cell>
<clr-dg-cell class="ellipsis" title="{{ c.package }}">{{
c.package
}}</clr-dg-cell>

View File

@ -30,6 +30,7 @@ export enum OptionType {
CVE_ID = 'cve_id',
SEVERITY = 'severity',
CVSS3 = 'cvss_score_v3',
STATUS = 'status',
REPO = 'repository_name',
PACKAGE = 'package',
TAG = 'tag',
@ -42,6 +43,7 @@ export const OptionType_I18n_Map = {
[OptionType.CVE_ID]: 'SECURITY_HUB.CVE_ID',
[OptionType.SEVERITY]: 'VULNERABILITY.GRID.COLUMN_SEVERITY',
[OptionType.CVSS3]: 'VULNERABILITY.GRID.CVSS3',
[OptionType.STATUS]: 'VULNERABILITY.GRID.COLUMN_STATUS',
[OptionType.REPO]: 'SECURITY_HUB.REPO_NAME',
[OptionType.PACKAGE]: 'VULNERABILITY.GRID.COLUMN_PACKAGE',
[OptionType.TAG]: 'REPLICATION.TAG',

View File

@ -33,6 +33,7 @@ export class VulnerabilityFilterComponent {
OptionType.CVE_ID,
OptionType.SEVERITY,
OptionType.CVSS3,
OptionType.STATUS,
OptionType.PROJECT_ID,
OptionType.REPO,
OptionType.PACKAGE,
@ -43,6 +44,7 @@ export class VulnerabilityFilterComponent {
OptionType.CVE_ID,
OptionType.SEVERITY,
OptionType.CVSS3,
OptionType.STATUS,
OptionType.PROJECT_ID,
OptionType.REPO,
OptionType.PACKAGE,

View File

@ -64,6 +64,9 @@
<clr-dg-column [clrDgSortBy]="cvssSort">{{
'VULNERABILITY.GRID.CVSS3' | translate
}}</clr-dg-column>
<clr-dg-column>{{
'VULNERABILITY.GRID.COLUMN_STATUS' | translate
}}</clr-dg-column>
<clr-dg-column [clrDgField]="'package'">{{
'VULNERABILITY.GRID.COLUMN_PACKAGE' | translate
}}</clr-dg-column>
@ -154,6 +157,7 @@
<div class="clr-col">{{ item?.value?.V3Score }}</div>
</div>
</clr-dg-cell>
<clr-dg-cell>{{ res.status }}</clr-dg-cell>
<clr-dg-cell>{{ res.package }}</clr-dg-cell>
<clr-dg-cell>{{ res.version }}</clr-dg-cell>
<clr-dg-cell>

View File

@ -43,6 +43,7 @@ describe('ArtifactVulnerabilitiesComponent', () => {
{
id: '123',
severity: 'low',
status: 'fixed',
package: 'test',
version: '1.0',
links: ['testLink'],
@ -52,6 +53,7 @@ describe('ArtifactVulnerabilitiesComponent', () => {
{
id: '456',
severity: 'high',
status: 'fixed',
package: 'test',
version: '1.0',
links: ['testLink'],
@ -163,7 +165,7 @@ describe('ArtifactVulnerabilitiesComponent', () => {
await fixture.whenStable();
const cols = fixture.nativeElement.querySelectorAll('clr-dg-column');
expect(cols).toBeTruthy();
expect(cols.length).toEqual(7);
expect(cols.length).toEqual(8);
const firstRow = fixture.nativeElement.querySelector('clr-dg-row');
const cells = firstRow.querySelectorAll('clr-dg-cell');
expect(cells[cells.length - 1].innerText).toEqual('TAG_RETENTION.YES');

View File

@ -21,6 +21,7 @@ import locale_fr from '@angular/common/locales/fr';
import locale_pt from '@angular/common/locales/pt-PT';
import locale_tr from '@angular/common/locales/tr';
import locale_de from '@angular/common/locales/de';
import locale_ru from '@angular/common/locales/ru';
import { ClrCommonStrings } from '@clr/angular/utils/i18n/common-strings.interface';
export const enum AlertType {
@ -261,6 +262,7 @@ export const LANGUAGES = {
'pt-br': ['Português do Brasil', locale_pt],
'tr-tr': ['Türkçe', locale_tr],
'de-de': ['Deutsch', locale_de],
'ru-ru': ['Русский', locale_ru],
} as const;
export const supportedLangs = Object.keys(LANGUAGES) as SupportedLanguage[];
/**

View File

@ -197,6 +197,7 @@ export enum VulnerabilitySeverity {
export interface VulnerabilityBase {
id: string;
severity: string;
status: string;
package: string;
version: string;
}

View File

@ -1842,8 +1842,8 @@
"PURGE_HISTORY": "Bereinigungshistorie",
"FORWARD_ENDPOINT": "Syslog Endpunkt für die Weiterleitung des Audit-Logs",
"FORWARD_ENDPOINT_TOOLTIP": "Leite Audit-Logs an einen Syslog-Endpunkt, zum Beispiel: harbor-log:10514",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "Datenbank für Audit-Logs übergehen",
"SKIP_DATABASE_TOOLTIP": "Audit-Logs werden nicht in die Datenbank geschrieben. Nur verfügbar, wenn die Weiterleitung für Audit-Logs konfiguriert ist.",

View File

@ -1106,6 +1106,7 @@
"PLACEHOLDER": "We couldn't find any scanning results!",
"COLUMN_ID": "Vulnerability",
"COLUMN_SEVERITY": "Severity",
"COLUMN_STATUS": "Status",
"COLUMN_PACKAGE": "Package",
"COLUMN_PACKAGES": "Packages",
"COLUMN_VERSION": "Current version",
@ -1844,8 +1845,8 @@
"PURGE_HISTORY": "Purge History",
"FORWARD_ENDPOINT": "Audit Log Forward Syslog Endpoint",
"FORWARD_ENDPOINT_TOOLTIP": "Forward audit logs to the syslog endpoint, for example: harbor-log:10514",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "Skip Audit Log Database",
"SKIP_DATABASE_TOOLTIP": "Skip to log audit log in the database, only available when audit log forward endpoint is configured",

View File

@ -1834,8 +1834,8 @@
"PURGE_HISTORY": "Historial de purga",
"FORWARD_ENDPOINT": "Audit Log Reenviar Syslog Endpoint",
"FORWARD_ENDPOINT_TOOLTIP": "Reenviar audit logs al endpoint de syslog, por ejemplo: harbor-log:10514",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "Omitir Base de Datos Audit Log",
"SKIP_DATABASE_TOOLTIP": "Saltar al registro de auditoría en la base de datos, solo disponible cuando se configura el endpoint de reenvío del registro de auditoría",

View File

@ -1844,8 +1844,8 @@
"PURGE_HISTORY": "Historique de purges",
"FORWARD_ENDPOINT": "Endpoint Syslog de transfert de logs d'audit",
"FORWARD_ENDPOINT_TOOLTIP": "Transfère les logs d'audit à l'endpoint Syslog, par exemple : harbor-log:10514",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "Ne pas enregistrer les logs d'audit dans la base de données",
"SKIP_DATABASE_TOOLTIP": "Ne pas enregistrer les logs d'audit dans la base de données, disponible uniquement lorsque l'endpoint de transfert de logs d'audit est configuré",

View File

@ -1833,8 +1833,8 @@
"PURGE_HISTORY": "제거 기록",
"FORWARD_ENDPOINT": "감사 로그를 Syslog 엔트포인트로 전달",
"FORWARD_ENDPOINT_TOOLTIP": "감사 로그를 syslog 엔드포인트로 전달합니다(예: harbor-log:10514)",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "감사 로그 데이터베이스 건너뛰기",
"SKIP_DATABASE_TOOLTIP": "데이터베이스의 감사 로그 로그로 건너뛰기, 감사 로그 전달 엔드포인트가 구성된 경우에만 사용 가능",

View File

@ -1839,8 +1839,8 @@
"PURGE_HISTORY": "Purge History",
"FORWARD_ENDPOINT": "Audit Log Forward Syslog Endpoint",
"FORWARD_ENDPOINT_TOOLTIP": "Forward audit logs to the syslog endpoint, for example: harbor-log:10514",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "Skip Audit Log Database",
"SKIP_DATABASE_TOOLTIP": "Skip to log audit log in the database, only available when audit log forward endpoint is configured",

File diff suppressed because it is too large Load Diff

View File

@ -1843,8 +1843,8 @@
"PURGE_HISTORY": "Purge History",
"FORWARD_ENDPOINT": "Audit Log Forward Syslog Endpoint",
"FORWARD_ENDPOINT_TOOLTIP": "Forward audit logs to the syslog endpoint, for example: harbor-log:10514",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "Skip Audit Log Database",
"SKIP_DATABASE_TOOLTIP": "Skip to log audit log in the database, only available when audit log forward endpoint is configured",

View File

@ -1841,8 +1841,8 @@
"PURGE_HISTORY": "清理历史",
"FORWARD_ENDPOINT": "日志转发端点",
"FORWARD_ENDPOINT_TOOLTIP": "将日志转发到指定的 syslog 端点例如harbor-log:10514",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "跳过日志数据库",
"SKIP_DATABASE_TOOLTIP": "开启此项将不会在数据库中记录日志,需先配置日志转发端点",

View File

@ -1839,8 +1839,8 @@
"PURGE_HISTORY": "清除歷史",
"FORWARD_ENDPOINT": "稽核日誌轉發 Syslog 端點",
"FORWARD_ENDPOINT_TOOLTIP": "將稽核日誌轉發至 syslog 端點,例如: harbor-log:10514",
"DISABLE_AUDIT_LOG_EVENT_TYPE": "Disable Audit Log Event Type",
"DISABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be disabled.",
"ENABLE_AUDIT_LOG_EVENT_TYPE": "Enable Audit Log Event Type",
"ENABLE_AUDIT_LOG_EVENT_TYPE_TOOLTIP": "The comma-separated name of the audit log event to be enabled.",
"AUDIT_LOG_EVENT_TYPE_EMPTY": "No audit log event type exists.",
"SKIP_DATABASE": "跳過稽核日誌資料庫",
"SKIP_DATABASE_TOOLTIP": "跳過在資料庫中記錄稽核日誌,僅在設定稽核日誌轉發端點時可用",

View File

@ -260,6 +260,10 @@ func (r *retentionAPI) DeleteRetention(ctx context.Context, params operation.Del
if err = r.retentionCtl.DeleteRetention(ctx, params.ID); err != nil {
return r.SendError(ctx, err)
}
// delete retention data in project_metadata
if err := r.proMetaMgr.Delete(ctx, p.Scope.Reference, "retention_id"); err != nil {
return r.SendError(ctx, err)
}
return operation.NewDeleteRetentionOK()
}

View File

@ -136,6 +136,7 @@ func toVulnerabilities(vuls []*secHubModel.VulnerabilityItem) []*models.Vulnerab
Digest: item.Digest,
CVEID: item.CVEID,
Severity: item.Severity,
Status: item.Status,
Package: item.Package,
Tags: item.Tags,
Version: item.PackageVersion,

View File

@ -3,13 +3,17 @@ import os
import subprocess
import time
import v2_swagger_client
import importlib
try:
from urllib import getproxies
except ImportError:
from urllib.request import getproxies
def swagger_module(name="v2_swagger_client"):
    """Resolve a swagger client module at call time instead of import time.

    Importing lazily via importlib means every caller gets whatever module is
    currently registered under ``name`` — NOTE(review): presumably so a
    regenerated ``v2_swagger_client`` package is picked up without restarting;
    confirm against the test harness.

    :param name: dotted module name to import; defaults to the generated
        Harbor v2 swagger client, keeping existing zero-argument callers working.
    :return: the imported module object.
    :raises ImportError: if ``name`` cannot be imported.
    """
    return importlib.import_module(name)
class Server:
def __init__(self, endpoint, verify_ssl):
self.endpoint = endpoint
@ -26,6 +30,7 @@ def get_endpoint():
return os.environ.get("HARBOR_HOST_SCHEMA", "https")+ "://"+harbor_server+"/api/v2.0"
def _create_client(server, credential, debug, api_type):
v2_swagger_client = swagger_module()
cfg = v2_swagger_client.Configuration()
cfg.host = server.endpoint
cfg.verify_ssl = server.verify_ssl

View File

@ -2,12 +2,16 @@
import time
import base
import v2_swagger_client
import importlib
import docker_api
from docker_api import DockerAPI
from v2_swagger_client.rest import ApiException
# from v2_swagger_client.rest import ApiException
from testutils import DOCKER_USER, DOCKER_PWD
def swagger_module(name="v2_swagger_client"):
    """Return a swagger client module, resolved lazily via importlib.

    Lazy resolution returns whatever module is currently registered under
    ``name`` — NOTE(review): presumably so a freshly generated
    ``v2_swagger_client`` is picked up by long-lived test processes; confirm.

    :param name: dotted module name to import; the default preserves the
        original zero-argument behavior for existing callers.
    :return: the imported module object.
    :raises ImportError: if ``name`` cannot be imported.
    """
    return importlib.import_module(name)
def pull_harbor_image(registry, username, password, image, tag, expected_login_error_message = None, expected_error_message = None):
_docker_api = DockerAPI()
_docker_api.docker_login(registry, username, password, expected_error_message = expected_login_error_message)
@ -122,6 +126,7 @@ class Repository(base.Base, object):
def add_label_to_tag(self, repo_name, tag, label_id, expect_status_code = 200, **kwargs):
    """Attach the label identified by ``label_id`` to ``tag`` of ``repo_name``
    and assert the API responds with ``expect_status_code`` (default 200).
    Extra ``kwargs`` are forwarded to ``self._get_client`` (credentials etc.).
    """
    api = self._get_client(**kwargs)
    # Resolve the swagger client lazily rather than using the module import.
    swagger = swagger_module()
    tag_label = swagger.Label(id=label_id)
    _, code, _ = api.repositories_repo_name_tags_tag_labels_post_with_http_info(repo_name, tag, tag_label)
    base._assert_status_code(expect_status_code, code)
@ -158,11 +163,12 @@ class Repository(base.Base, object):
def retag_image(self, repo_name, tag, src_image, override=True, expect_status_code = 200, expect_response_body = None, **kwargs):
client = self._get_client(**kwargs)
v2_swagger_client = swagger_module()
request = v2_swagger_client.RetagReq(tag=tag, src_image=src_image, override=override)
try:
data, status_code, _ = client.repositories_repo_name_tags_post_with_http_info(repo_name, request)
except ApiException as e:
except v2_swagger_client.rest.ApiException as e:
base._assert_status_code(expect_status_code, e.status)
if expect_response_body is not None:
base._assert_status_body(expect_response_body, e.body)

View File

@ -15,7 +15,7 @@ print(sys.path)
files_directory = os.getcwd() + "/tests/files/"
import v2_swagger_client
import importlib
admin_user = "admin"
admin_pwd = "Harbor12345"
@ -35,9 +35,12 @@ ES_ENDPOINT = os.environ.get('ES_ENDPOINT', '')
BASE_IMAGE = dict(name='busybox', tag='latest')
BASE_IMAGE_ABS_PATH_NAME = '/' + BASE_IMAGE['name'] + '.tar'
def swagger_module(name="v2_swagger_client"):
    """Import and return a swagger client module on demand.

    Using importlib at call time returns the module currently registered
    under ``name`` — NOTE(review): presumably to tolerate a regenerated
    ``v2_swagger_client`` package; verify against how the e2e suite builds it.

    :param name: dotted module name to import; defaults to the Harbor v2
        swagger client so existing zero-argument callers are unaffected.
    :return: the imported module object.
    :raises ImportError: if ``name`` cannot be imported.
    """
    return importlib.import_module(name)
def GetRepositoryApi(username, password, harbor_server= os.environ.get("HARBOR_HOST", '')):
v2_swagger_client = swagger_module()
cfg = v2_swagger_client.Configuration()
cfg.host = "https://"+harbor_server+"/api/v2.0"
cfg.username = username
@ -49,6 +52,7 @@ def GetRepositoryApi(username, password, harbor_server= os.environ.get("HARBOR_H
return api_instance
def GetUserGroupApi(username, password, harbor_server= os.environ.get("HARBOR_HOST", '')):
v2_swagger_client = swagger_module()
cfg = v2_swagger_client.Configuration()
cfg.host = "https://"+harbor_server+"/api/v2.0"
cfg.username = username