
Backport Jenkinsfile to 2.4 branch (#9329)

Also fix a Scala 2.11 compile error in GroupMetadataManagerTest
Branch: pull/9481/head
Author: David Arthur (committed via GitHub)
Commit: 63f3e1c33b
Changed files:
  1. Jenkinsfile (164 lines changed)
  2. build.gradle (4 lines changed)
  3. core/src/test/scala/unit/kafka/coordinator/group/GroupMetadataManagerTest.scala (12 lines changed)

Jenkinsfile (vendored, 164 lines added)

@@ -0,0 +1,164 @@
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

def setupGradle() {
  // Delete gradle cache to workaround cache corruption bugs, see KAFKA-3167
  dir('.gradle') {
    deleteDir()
  }
  sh './gradlew -version'
}

def doValidation() {
  sh '''
    ./gradlew -PscalaVersion=$SCALA_VERSION clean compileJava compileScala compileTestJava compileTestScala \
        spotlessScalaCheck checkstyleMain checkstyleTest spotbugsMain rat \
        --profile --no-daemon --continue -PxmlSpotBugsReport=true
  '''
}

def doTest() {
  sh '''
    ./gradlew -PscalaVersion=$SCALA_VERSION unitTest integrationTest \
        --profile --no-daemon --continue -PtestLoggingEvents=started,passed,skipped,failed \
        -PignoreFailures=true -PmaxParallelForks=2 -PmaxTestRetries=1 -PmaxTestRetryFailures=5
  '''
  junit '**/build/test-results/**/TEST-*.xml'
}
def doStreamsArchetype() {
  echo 'Verify that Kafka Streams archetype compiles'

  sh '''
    ./gradlew streams:install clients:install connect:json:install connect:api:install \
        || { echo 'Could not install kafka-streams.jar (and dependencies) locally'; exit 1; }
  '''

  VERSION = sh(script: 'grep "^version=" gradle.properties | cut -d= -f 2', returnStdout: true).trim()

  dir('streams/quickstart') {
    sh '''
      mvn clean install -Dgpg.skip \
          || { echo 'Could not `mvn install` streams quickstart archetype'; exit 1; }
    '''

    dir('test-streams-archetype') {
      // Note the double quotes for variable interpolation
      sh """
        echo "Y" | mvn archetype:generate \
            -DarchetypeCatalog=local \
            -DarchetypeGroupId=org.apache.kafka \
            -DarchetypeArtifactId=streams-quickstart-java \
            -DarchetypeVersion=${VERSION} \
            -DgroupId=streams.examples \
            -DartifactId=streams.examples \
            -Dversion=0.1 \
            -Dpackage=myapps \
            || { echo 'Could not create new project using streams quickstart archetype'; exit 1; }
      """

      dir('streams.examples') {
        sh '''
          mvn compile \
              || { echo 'Could not compile streams quickstart archetype project'; exit 1; }
        '''
      }
    }
  }
}

def tryStreamsArchetype() {
  try {
    doStreamsArchetype()
  } catch(err) {
    echo 'Failed to build Kafka Streams archetype, marking this build UNSTABLE'
    currentBuild.result = 'UNSTABLE'
  }
}
pipeline {
  agent none
  stages {
    stage('Build') {
      parallel {
        stage('JDK 8 and Scala 2.11') {
          agent { label 'ubuntu' }
          tools {
            jdk 'JDK 1.8 (latest)'
            maven 'Maven 3.6.3'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.11
          }
          steps {
            setupGradle()
            doValidation()
            doTest()
            tryStreamsArchetype()
          }
        }

        stage('JDK 11 and Scala 2.12') {
          agent { label 'ubuntu' }
          tools {
            jdk 'JDK 11 (latest)'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.12
          }
          steps {
            setupGradle()
            doValidation()
            doTest()
            echo 'Skipping Kafka Streams archetype test for Java 11'
          }
        }

        stage('JDK 11 and Scala 2.13') {
          agent { label 'ubuntu' }
          tools {
            jdk 'JDK 11 (latest)'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.13
          }
          steps {
            setupGradle()
            doValidation()
            doTest()
            echo 'Skipping Kafka Streams archetype test for Java 11'
          }
        }
      }
    }
  }
}

build.gradle (4 lines changed)

@@ -112,6 +112,7 @@ ext {
  buildVersionFileName = "kafka-version.properties"
  userMaxForks = project.hasProperty('maxParallelForks') ? maxParallelForks.toInteger() : null
  userIgnoreFailures = project.hasProperty('ignoreFailures') ? ignoreFailures : false
  skipSigning = project.hasProperty('skipSigning') && skipSigning.toBoolean()
  shouldSign = !skipSigning && !version.endsWith("SNAPSHOT") && project.gradle.startParameter.taskNames.any { it.contains("upload") }
@@ -300,6 +301,7 @@ subprojects {
  test {
    maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
    ignoreFailures = userIgnoreFailures
    minHeapSize = "256m"
    maxHeapSize = "2048m"
@@ -314,6 +316,7 @@ subprojects {
  task integrationTest(type: Test, dependsOn: compileJava) {
    maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
    ignoreFailures = userIgnoreFailures
    minHeapSize = "256m"
    maxHeapSize = "2048m"
@@ -337,6 +340,7 @@ subprojects {
  task unitTest(type: Test, dependsOn: compileJava) {
    maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
    ignoreFailures = userIgnoreFailures
    minHeapSize = "256m"
    maxHeapSize = "2048m"

core/src/test/scala/unit/kafka/coordinator/group/GroupMetadataManagerTest.scala (12 lines changed)

@@ -45,7 +45,7 @@ import org.apache.kafka.common.requests.ProduceResponse.PartitionResponse
 import org.apache.kafka.common.utils.Utils
 import org.apache.kafka.common.KafkaException
 import org.easymock.{Capture, EasyMock, IAnswer}
-import org.junit.Assert.{assertEquals, assertFalse, assertNull, assertTrue, assertThrows}
+import org.junit.Assert.{assertEquals, assertFalse, assertNull, assertTrue}
 import org.junit.{Before, Test}
 import org.scalatest.Assertions.fail
@@ -912,9 +912,13 @@ class GroupMetadataManagerTest {
     // reset the position to the starting position 0 so that it can read the data in correct order
     groupMetadataRecordValue.position(0)
-    val e = assertThrows(classOf[KafkaException],
-      () => GroupMetadataManager.readGroupMessageValue(groupId, groupMetadataRecordValue, time))
-    assertEquals(s"Unknown group metadata version ${unsupportedVersion}", e.getMessage)
+    try {
+      GroupMetadataManager.readGroupMessageValue(groupId, groupMetadataRecordValue, time)
+      fail("Expected KafkaException here")
+    } catch {
+      case e: KafkaException => assertEquals(s"Unknown group metadata version ${unsupportedVersion}", e.getMessage)
+      case _ => fail("Expected KafkaException here")
+    }
   }

   @Test
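
A note on the GroupMetadataManagerTest change above: JUnit 4.13's assertThrows takes a ThrowingRunnable, and Scala 2.11 does not convert a `() => ...` lambda into a Java functional interface (SAM conversion only became standard in Scala 2.12), which is most likely the compile error the commit message refers to; hence the hand-rolled try/catch on the 2.4 branch. Below is a minimal sketch of the same idea as a reusable, 2.11-friendly helper. The name assertKafkaException and the enclosing object are hypothetical and not part of the Kafka codebase.

import org.apache.kafka.common.KafkaException
import org.scalatest.Assertions.fail

// Hypothetical helper, not from the Kafka sources: runs `block` and returns the
// KafkaException it throws, failing the test if nothing is thrown. The by-name
// parameter avoids the ThrowingRunnable conversion that Scala 2.11 cannot perform.
object ExceptionAssertions {
  def assertKafkaException(block: => Unit): KafkaException = {
    try {
      block
      fail("Expected KafkaException here")
    } catch {
      case e: KafkaException => e
    }
  }
}

// Usage, mirroring the test in the diff (identifiers taken from the changed lines):
//   val e = ExceptionAssertions.assertKafkaException(
//     GroupMetadataManager.readGroupMessageValue(groupId, groupMetadataRecordValue, time))
//   assertEquals(s"Unknown group metadata version ${unsupportedVersion}", e.getMessage)

Letting exceptions other than KafkaException simply propagate keeps JUnit's own failure reporting and makes the catch-all fail branch in the committed version unnecessary; this sketch only illustrates the pattern under those assumptions.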
