mirror of https://github.com/apache/kafka.git
Backport Jenkinsfile to 2.4 branch (#9329)
Also fix a Scala 2.11 compile error in GroupMetadataManagerTest
This commit is contained in:
parent
52ab0ad6cf
commit
63f3e1c33b
|
@ -0,0 +1,164 @@
|
||||||
|
/*
|
||||||
|
*
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
* contributor license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright ownership.
|
||||||
|
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||||
|
* (the "License"); you may not use this file except in compliance with
|
||||||
|
* the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Prepare Gradle on the current build node before any build step runs.
// Wipes the per-workspace cache and then sanity-checks the wrapper.
def setupGradle() {
  // Delete gradle cache to workaround cache corruption bugs, see KAFKA-3167
  dir('.gradle') {
    deleteDir()
  }
  // Print the Gradle version so the build log records exactly which
  // wrapper distribution this node resolved.
  sh './gradlew -version'
}
|
||||||
|
|
||||||
|
// Compile the full source tree and run all static-analysis checks
// (spotless, checkstyle, spotbugs, rat) for the Scala version selected
// via the SCALA_VERSION environment variable.
// --continue keeps going past individual task failures so one run
// reports every violation.
def doValidation() {
  sh '''
    ./gradlew -PscalaVersion=$SCALA_VERSION clean compileJava compileScala compileTestJava compileTestScala \
        spotlessScalaCheck checkstyleMain checkstyleTest spotbugsMain rat \
        --profile --no-daemon --continue -PxmlSpotBugsReport=true
  '''
}
|
||||||
|
|
||||||
|
// Run the unit and integration test suites and publish their JUnit XML
// results to Jenkins. -PignoreFailures=true lets the gradle invocation
// succeed even with failing tests; the junit step below is what marks
// the build unstable based on the recorded results.
def doTest() {
  sh '''
    ./gradlew -PscalaVersion=$SCALA_VERSION unitTest integrationTest \
        --profile --no-daemon --continue -PtestLoggingEvents=started,passed,skipped,failed \
        -PignoreFailures=true -PmaxParallelForks=2 -PmaxTestRetries=1 -PmaxTestRetryFailures=5
  '''
  junit '**/build/test-results/**/TEST-*.xml'
}
|
||||||
|
|
||||||
|
// Verify that the Kafka Streams quickstart Maven archetype works end to end:
//  1. install the kafka-streams jar (and its dependencies) into the local
//     Maven repository via Gradle,
//  2. build and install the streams quickstart archetype itself,
//  3. generate a new project from that archetype,
//  4. compile the generated project.
// Each step aborts the shell script with a descriptive message on failure.
def doStreamsArchetype() {
  echo 'Verify that Kafka Streams archetype compiles'

  // Fix: removed a stray backtick that was embedded in this error message
  // ("...locally`") in the previous version of this step.
  sh '''
    ./gradlew streams:install clients:install connect:json:install connect:api:install \
        || { echo 'Could not install kafka-streams.jar (and dependencies) locally'; exit 1; }
  '''

  // Read the project version from gradle.properties; assigned without `def`
  // so it lands in the script binding and is visible to the GString below.
  VERSION = sh(script: 'grep "^version=" gradle.properties | cut -d= -f 2', returnStdout: true).trim()

  dir('streams/quickstart') {
    sh '''
      mvn clean install -Dgpg.skip \
          || { echo 'Could not `mvn install` streams quickstart archetype'; exit 1; }
    '''

    dir('test-streams-archetype') {
      // Note the double quotes for variable interpolation
      sh """
        echo "Y" | mvn archetype:generate \
            -DarchetypeCatalog=local \
            -DarchetypeGroupId=org.apache.kafka \
            -DarchetypeArtifactId=streams-quickstart-java \
            -DarchetypeVersion=${VERSION} \
            -DgroupId=streams.examples \
            -DartifactId=streams.examples \
            -Dversion=0.1 \
            -Dpackage=myapps \
            || { echo 'Could not create new project using streams quickstart archetype'; exit 1; }
      """

      dir('streams.examples') {
        sh '''
          mvn compile \
              || { echo 'Could not compile streams quickstart archetype project'; exit 1; }
        '''
      }
    }
  }
}
|
||||||
|
|
||||||
|
// Best-effort wrapper around doStreamsArchetype(): an archetype failure
// should not fail the whole build, only downgrade it to UNSTABLE.
def tryStreamsArchetype() {
  try {
    doStreamsArchetype()
  } catch (ignored) {
    // Deliberately swallow the exception; the echo + UNSTABLE result is
    // the intended signal for this non-critical verification step.
    echo 'Failed to build Kafka Streams archetype, marking this build UNSTABLE'
    currentBuild.result = 'UNSTABLE'
  }
}
|
||||||
|
|
||||||
|
|
||||||
|
// Declarative pipeline: build Kafka in parallel across three
// JDK/Scala combinations. Only the JDK 8 stage runs the Maven-based
// Streams archetype check (it is the only stage that declares a Maven
// tool); the JDK 11 stages skip it explicitly.
pipeline {
  agent none
  stages {
    stage('Build') {
      parallel {

        stage('JDK 8 and Scala 2.11') {
          agent { label 'ubuntu' }
          tools {
            jdk 'JDK 1.8 (latest)'
            // Maven is needed here for the Streams archetype verification.
            maven 'Maven 3.6.3'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.11
          }
          steps {
            setupGradle()
            doValidation()
            doTest()
            tryStreamsArchetype()
          }
        }

        stage('JDK 11 and Scala 2.12') {
          agent { label 'ubuntu' }
          tools {
            jdk 'JDK 11 (latest)'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.12
          }
          steps {
            setupGradle()
            doValidation()
            doTest()
            echo 'Skipping Kafka Streams archetype test for Java 11'
          }
        }

        stage('JDK 11 and Scala 2.13') {
          agent { label 'ubuntu' }
          tools {
            jdk 'JDK 11 (latest)'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.13
          }
          steps {
            setupGradle()
            doValidation()
            doTest()
            echo 'Skipping Kafka Streams archetype test for Java 11'
          }
        }

      }
    }
  }
}
|
|
@ -112,6 +112,7 @@ ext {
|
||||||
buildVersionFileName = "kafka-version.properties"
|
buildVersionFileName = "kafka-version.properties"
|
||||||
|
|
||||||
userMaxForks = project.hasProperty('maxParallelForks') ? maxParallelForks.toInteger() : null
|
userMaxForks = project.hasProperty('maxParallelForks') ? maxParallelForks.toInteger() : null
|
||||||
|
userIgnoreFailures = project.hasProperty('ignoreFailures') ? ignoreFailures : false
|
||||||
|
|
||||||
skipSigning = project.hasProperty('skipSigning') && skipSigning.toBoolean()
|
skipSigning = project.hasProperty('skipSigning') && skipSigning.toBoolean()
|
||||||
shouldSign = !skipSigning && !version.endsWith("SNAPSHOT") && project.gradle.startParameter.taskNames.any { it.contains("upload") }
|
shouldSign = !skipSigning && !version.endsWith("SNAPSHOT") && project.gradle.startParameter.taskNames.any { it.contains("upload") }
|
||||||
|
@ -300,6 +301,7 @@ subprojects {
|
||||||
|
|
||||||
test {
|
test {
|
||||||
maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
|
maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
|
||||||
|
ignoreFailures = userIgnoreFailures
|
||||||
|
|
||||||
minHeapSize = "256m"
|
minHeapSize = "256m"
|
||||||
maxHeapSize = "2048m"
|
maxHeapSize = "2048m"
|
||||||
|
@ -314,6 +316,7 @@ subprojects {
|
||||||
|
|
||||||
task integrationTest(type: Test, dependsOn: compileJava) {
|
task integrationTest(type: Test, dependsOn: compileJava) {
|
||||||
maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
|
maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
|
||||||
|
ignoreFailures = userIgnoreFailures
|
||||||
|
|
||||||
minHeapSize = "256m"
|
minHeapSize = "256m"
|
||||||
maxHeapSize = "2048m"
|
maxHeapSize = "2048m"
|
||||||
|
@ -337,6 +340,7 @@ subprojects {
|
||||||
|
|
||||||
task unitTest(type: Test, dependsOn: compileJava) {
|
task unitTest(type: Test, dependsOn: compileJava) {
|
||||||
maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
|
maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
|
||||||
|
ignoreFailures = userIgnoreFailures
|
||||||
|
|
||||||
minHeapSize = "256m"
|
minHeapSize = "256m"
|
||||||
maxHeapSize = "2048m"
|
maxHeapSize = "2048m"
|
||||||
|
|
|
@ -45,7 +45,7 @@ import org.apache.kafka.common.requests.ProduceResponse.PartitionResponse
|
||||||
import org.apache.kafka.common.utils.Utils
|
import org.apache.kafka.common.utils.Utils
|
||||||
import org.apache.kafka.common.KafkaException
|
import org.apache.kafka.common.KafkaException
|
||||||
import org.easymock.{Capture, EasyMock, IAnswer}
|
import org.easymock.{Capture, EasyMock, IAnswer}
|
||||||
import org.junit.Assert.{assertEquals, assertFalse, assertNull, assertTrue, assertThrows}
|
import org.junit.Assert.{assertEquals, assertFalse, assertNull, assertTrue}
|
||||||
import org.junit.{Before, Test}
|
import org.junit.{Before, Test}
|
||||||
import org.scalatest.Assertions.fail
|
import org.scalatest.Assertions.fail
|
||||||
|
|
||||||
|
@ -912,9 +912,13 @@ class GroupMetadataManagerTest {
|
||||||
// reset the position to the starting position 0 so that it can read the data in correct order
|
// reset the position to the starting position 0 so that it can read the data in correct order
|
||||||
groupMetadataRecordValue.position(0)
|
groupMetadataRecordValue.position(0)
|
||||||
|
|
||||||
val e = assertThrows(classOf[KafkaException],
|
try {
|
||||||
() => GroupMetadataManager.readGroupMessageValue(groupId, groupMetadataRecordValue, time))
|
GroupMetadataManager.readGroupMessageValue(groupId, groupMetadataRecordValue, time)
|
||||||
assertEquals(s"Unknown group metadata version ${unsupportedVersion}", e.getMessage)
|
fail("Expected KafkaException here")
|
||||||
|
} catch {
|
||||||
|
case e: KafkaException => assertEquals(s"Unknown group metadata version ${unsupportedVersion}", e.getMessage)
|
||||||
|
case _ => fail("Expected KafkaException here")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|
Loading…
Reference in New Issue