// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import org.ajoberstar.grgit.Grgit
import java.nio.charset.StandardCharsets
// Bootstrap classpath for the build script itself: repositories and the
// plugin artifacts applied later in this file (spotless, spotbugs, etc.).
buildscript {
  repositories {
    mavenCentral()
    jcenter()
    maven {
      url "https://plugins.gradle.org/m2/"
    }
  }

  apply from: file('gradle/buildscript.gradle'), to: buildscript
  apply from: "$rootDir/gradle/dependencies.gradle"

  dependencies {
    // For Apache Rat plugin to ignore non-Git files
    classpath "org.ajoberstar.grgit:grgit-core:$versions.grgit"
    classpath "com.github.ben-manes:gradle-versions-plugin:$versions.gradleVersionsPlugin"
    classpath "org.scoverage:gradle-scoverage:$versions.scoveragePlugin"
    classpath "com.github.jengelman.gradle.plugins:shadow:$versions.shadowPlugin"
    classpath "org.owasp:dependency-check-gradle:$versions.owaspDepCheckPlugin"
    classpath "com.diffplug.spotless:spotless-plugin-gradle:$versions.spotlessPlugin"
    classpath "gradle.plugin.com.github.spotbugs.snom:spotbugs-gradle-plugin:$versions.spotbugsPlugin"
    classpath "org.gradle:test-retry-gradle-plugin:$versions.testRetryPlugin"
  }
}
// Spotless enforces scalafmt formatting on the streams module's Scala sources.
apply plugin: "com.diffplug.spotless"
spotless {
  scala {
    target 'streams/**/*.scala'
    scalafmt("$versions.scalafmt").configFile('checkstyle/.scalafmt.conf')
  }
}
// Configuration applied to the root project and every subproject.
allprojects {
  repositories {
    mavenCentral()
  }

  apply plugin: 'idea'
  apply plugin: 'org.owasp.dependencycheck'
  apply plugin: 'com.github.ben-manes.versions'

  // `dependencyUpdates` reports newer dependency versions; reject pre-release
  // qualifiers so only stable releases are suggested.
  dependencyUpdates {
    revision = "release"
    resolutionStrategy {
      componentSelection { rules ->
        rules.all { ComponentSelection selection ->
          boolean rejected = ['snap', 'alpha', 'beta', 'rc', 'cr', 'm'].any { qualifier ->
            // ==~ is a full-string regex match; (?i) makes it case-insensitive
            selection.candidate.version ==~ /(?i).*[.-]${qualifier}[.\d-]*/
          }
          if (rejected) {
            selection.reject('Release candidate')
          }
        }
      }
    }
  }

  configurations.all {
    // zinc is the Scala incremental compiler, it has a configuration for its own dependencies
    // that are unrelated to the project dependencies, we should not change them
    if (name != "zinc") {
      resolutionStrategy {
        force(
          // be explicit about the javassist dependency version instead of relying on the transitive version
          libs.javassist,
          // ensure we have a single version in the classpath despite transitive dependencies
          libs.scalaLibrary,
          libs.scalaReflect,
          libs.jacksonAnnotations,
          // be explicit about the Netty dependency version instead of relying on the version set by
          // ZooKeeper (potentially older and containing CVEs)
          libs.nettyHandler,
          libs.nettyTransportNativeEpoll
        )
      }
    }
  }

  tasks.withType(Javadoc) {
    // disable the crazy super-strict doclint tool in Java 8
    // noinspection SpellCheckingInspection
    options.addStringOption('Xdoclint:none', '-quiet')
  }
}
// Project-wide extra properties. `user*` values come from -P command-line
// properties and fall back to sensible defaults.
// NOTE: a pasted commit-message blob (git-blame residue) has been removed
// from this block; it was not valid Groovy.
ext {
  gradleVersion = versions.gradle
  minJavaVersion = "8"
  buildVersionFileName = "kafka-version.properties"

  // Shared JVM settings for test tasks and the forked Scala compiler.
  // The throughput GC is preferred since latency matters little for batch builds.
  defaultMaxHeapSize = "2g"
  defaultJvmArgs = ["-Xss4m", "-XX:+UseParallelGC"]

  userMaxForks = project.hasProperty('maxParallelForks') ? maxParallelForks.toInteger() : null
  userIgnoreFailures = project.hasProperty('ignoreFailures') ? ignoreFailures : false

  userMaxTestRetries = project.hasProperty('maxTestRetries') ? maxTestRetries.toInteger() : 0
  userMaxTestRetryFailures = project.hasProperty('maxTestRetryFailures') ? maxTestRetryFailures.toInteger() : 0

  skipSigning = project.hasProperty('skipSigning') && skipSigning.toBoolean()
  // Only sign non-snapshot builds that are actually being uploaded.
  shouldSign = !skipSigning && !version.endsWith("SNAPSHOT") && project.gradle.startParameter.taskNames.any { it.contains("upload") }

  mavenUrl = project.hasProperty('mavenUrl') ? project.mavenUrl : ''
  mavenUsername = project.hasProperty('mavenUsername') ? project.mavenUsername : ''
  mavenPassword = project.hasProperty('mavenPassword') ? project.mavenPassword : ''

  userShowStandardStreams = project.hasProperty("showStandardStreams") ? showStandardStreams : null
  userTestLoggingEvents = project.hasProperty("testLoggingEvents") ? Arrays.asList(testLoggingEvents.split(",")) : null

  userEnableTestCoverage = project.hasProperty("enableTestCoverage") ? enableTestCoverage : false

  // See README.md for details on this option and the reasoning for the default
  userScalaOptimizerMode = project.hasProperty("scalaOptimizerMode") ? scalaOptimizerMode : "inline-kafka"
  def scalaOptimizerValues = ["none", "method", "inline-kafka", "inline-scala"]
  if (!scalaOptimizerValues.contains(userScalaOptimizerMode))
    // (fixed a stray ')' that was in the original message)
    throw new GradleException("Unexpected value for scalaOptimizerMode property. Expected one of $scalaOptimizerValues, but received: $userScalaOptimizerMode")

  generatedDocsDir = new File("${project.rootDir}/docs/generated")

  commitId = project.hasProperty('commitId') ? commitId : null
}
apply from: file('wrapper.gradle')

// Run Apache Rat (license header check) only inside a git checkout, since the
// exclusion list is derived from git's view of untracked/ignored files.
if (file('.git').exists()) {
  apply from: file('gradle/rat.gradle')
  rat {
    // Exclude everything under the directory that git should be ignoring via .gitignore or that isn't checked in. These
    // restrict us only to files that are checked in or are staged.
    def repo = Grgit.open(currentDir: project.getRootDir())
    // dryRun lists what `git clean` would delete, i.e. untracked/ignored files
    excludes = new ArrayList<String>(repo.clean(ignore: false, directories: true, dryRun: true))
    // And some of the files that we have checked in should also be excluded from this check
    excludes.addAll([
      '**/.git/**',
      '**/build/**',
      'CONTRIBUTING.md',
      'PULL_REQUEST_TEMPLATE.md',
      'gradlew',
      'gradlew.bat',
      'gradle/wrapper/gradle-wrapper.properties',
      'TROGDOR.md',
      '**/README.md',
      '**/id_rsa',
      '**/id_rsa.pub',
      'checkstyle/suppressions.xml',
      'streams/quickstart/java/src/test/resources/projects/basic/goal.txt',
      'streams/streams-scala/logs/*',
      '**/generated/**'
    ])
  }
}
subprojects {
// enable running :dependencies task recursively on all subprojects
// eg: ./gradlew allDeps
task allDeps(type: DependencyReportTask) {}

// enable running :dependencyInsight task recursively on all subprojects
// eg: ./gradlew allDepInsight --configuration runtime --dependency com.fasterxml.jackson.core:jackson-databind
task allDepInsight(type: DependencyInsightReportTask) doLast {}
apply plugin: 'java'

// apply the eclipse plugin only to subprojects that hold code. 'connect' is just a folder.
if (!project.name.equals('connect')) {
  apply plugin: 'eclipse'
  fineTuneEclipseClasspathFile(eclipse, project)
}

apply plugin: 'maven'
apply plugin: 'signing'
apply plugin: 'checkstyle'
apply plugin: "com.github.spotbugs"
apply plugin: 'org.gradle.test-retry'

// Compile for the minimum supported Java release (see ext.minJavaVersion).
sourceCompatibility = minJavaVersion
targetCompatibility = minJavaVersion
java {
  consistentResolution {
    // resolve the compileClasspath and then "inject" the result of resolution
    // as strict constraints into the runtimeClasspath
    useCompileClasspathVersions()
  }
}
// Common javac settings for all Java compilation tasks in this subproject.
tasks.withType(JavaCompile) {
  options.encoding = 'UTF-8'
  options.compilerArgs << "-Xlint:all"
  // temporary exclusions until all the warnings are fixed
  options.compilerArgs << "-Xlint:-rawtypes"
  options.compilerArgs << "-Xlint:-serial"
  options.compilerArgs << "-Xlint:-try"
  options.compilerArgs << "-Werror"
  // --release is the recommended way to select the target release, but it's only supported in Java 9 so we also
  // set --source and --target via `sourceCompatibility` and `targetCompatibility`. If/when Gradle supports `--release`
  // natively (https://github.com/gradle/gradle/issues/2510), we should switch to that.
  if (JavaVersion.current().isJava9Compatible())
    options.compilerArgs << "--release" << minJavaVersion
}
// Maven deployment configuration: signs artifacts (when shouldSign) and
// publishes to the repository given by the mavenUrl/mavenUsername/mavenPassword
// project properties, with POM metadata for Apache Kafka.
uploadArchives {
  repositories {
    signing {
      required { shouldSign }
      sign configurations.archives

      // To test locally, replace mavenUrl in ~/.gradle/gradle.properties to file://localhost/tmp/myRepo/
      mavenDeployer {
        beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
        repository(url: "${mavenUrl}") {
          authentication(userName: "${mavenUsername}", password: "${mavenPassword}")
        }
        afterEvaluate {
          pom.artifactId = "${archivesBaseName}"
          pom.project {
            name 'Apache Kafka'
            packaging 'jar'
            url 'https://kafka.apache.org'
            licenses {
              license {
                name 'The Apache Software License, Version 2.0'
                url 'https://www.apache.org/licenses/LICENSE-2.0.txt'
                distribution 'repo'
              }
            }
          }
        }
      }
    }
  }
}
// JUnit 5 migration status: remove a project from this list once it has been
// converted to JUnit 5.
def shouldUseJUnit5 = !(["runtime", "streams-scala", "streams"].contains(it.project.name))

// Defaults for the test tasks below; overridable via -P properties (see ext).
def testLoggingEvents = ["passed", "skipped", "failed"]
def testShowStandardStreams = false
def testExceptionFormat = 'full'
// Gradle built-in logging only supports sending test output to stdout, which generates a lot
// of noise, especially for passing tests. We really only want output for failed tests. This
// hooks into the output and logs it (so we don't have to buffer it all in memory) and only
// saves the output for failing tests. Directory and filenames are such that you can, e.g.,
// create a Jenkins rule to collect failed test output.
def logTestStdout = {
  // Maps a test descriptor to a stable "ClassName.testName" identifier.
  def testId = { TestDescriptor descriptor ->
    "${descriptor.className}.${descriptor.name}".toString()
  }

  def logFiles = new HashMap<String, File>()
  def logStreams = new HashMap<String, FileOutputStream>()

  beforeTest { TestDescriptor td ->
    def tid = testId(td)
    // truncate the file name if it's too long
    def logFile = new File(
      "${projectDir}/build/reports/testOutput/${tid.substring(0, Math.min(tid.size(),240))}.test.stdout"
    )
    logFile.parentFile.mkdirs()
    logFiles.put(tid, logFile)
    logStreams.put(tid, new FileOutputStream(logFile))
  }

  onOutput { TestDescriptor td, TestOutputEvent toe ->
    def tid = testId(td)
    // Some output can happen outside the context of a specific test (e.g. at the class level)
    // and beforeTest/afterTest seems to not be invoked for these cases (and similarly, there's
    // a TestDescriptor hierarchy that includes the thread executing the test, Gradle tasks,
    // etc). We see some of these in practice and it seems like something buggy in the Gradle
    // test runner since we see it *before* any tests and it is frequently not related to any
    // code in the test (best guess is that it is tail output from last test). We won't have
    // an output file for these, so simply ignore them. If they become critical for debugging,
    // they can be seen with showStandardStreams.
    if (td.name == td.className || td.className == null) {
      // silently ignore output unrelated to specific test methods
      return
    } else if (logStreams.get(tid) == null) {
      println "WARNING: unexpectedly got output for a test [${tid}]" +
        " that we didn't previously see in the beforeTest hook." +
        " Message for debugging: [" + toe.message + "]."
      return
    }
    try {
      logStreams.get(tid).write(toe.message.getBytes(StandardCharsets.UTF_8))
    } catch (Exception e) {
      println "ERROR: Failed to write output for test ${tid}"
      e.printStackTrace()
    }
  }

  afterTest { TestDescriptor td, TestResult tr ->
    def tid = testId(td)
    try {
      logStreams.get(tid).close()
      // Keep the captured output only for failed tests.
      if (tr.resultType != TestResult.ResultType.FAILURE) {
        logFiles.get(tid).delete()
      } else {
        def file = logFiles.get(tid)
        println "${tid} failed, log available in ${file}"
      }
    } catch (Exception e) {
      println "ERROR: Failed to close stdout file for ${tid}"
      e.printStackTrace()
    } finally {
      logFiles.remove(tid)
      logStreams.remove(tid)
    }
  }
}
// Main `test` task configuration. A pasted commit-message blob (git-blame
// residue) has been removed from this block; it was not valid Groovy.
test {
  maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
  ignoreFailures = userIgnoreFailures

  // JVM settings are centralized in ext (defaultMaxHeapSize/defaultJvmArgs)
  maxHeapSize = defaultMaxHeapSize
  jvmArgs = defaultJvmArgs

  testLogging {
    events = userTestLoggingEvents ?: testLoggingEvents
    showStandardStreams = userShowStandardStreams ?: testShowStandardStreams
    exceptionFormat = testExceptionFormat
  }
  // Bind the shared stdout-capturing hooks to this task's delegate.
  logTestStdout.rehydrate(delegate, owner, this)()

  // The suites are for running sets of tests in IDEs.
  // Gradle will run each test class, so we exclude the suites to avoid redundantly running the tests twice.
  exclude '**/*Suite.class'

  if (shouldUseJUnit5)
    useJUnitPlatform()

  retry {
    maxRetries = userMaxTestRetries
    maxFailures = userMaxTestRetryFailures
  }
}
// Runs only integration tests (tagged "integration" under JUnit 5, or in the
// IntegrationTest category under JUnit 4). A pasted commit-message blob
// (git-blame residue) has been removed from this block; it was not valid Groovy.
task integrationTest(type: Test, dependsOn: compileJava) {
  maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
  ignoreFailures = userIgnoreFailures

  maxHeapSize = defaultMaxHeapSize
  jvmArgs = defaultJvmArgs

  testLogging {
    events = userTestLoggingEvents ?: testLoggingEvents
    showStandardStreams = userShowStandardStreams ?: testShowStandardStreams
    exceptionFormat = testExceptionFormat
  }
  logTestStdout.rehydrate(delegate, owner, this)()

  // The suites are for running sets of tests in IDEs.
  // Gradle will run each test class, so we exclude the suites to avoid redundantly running the tests twice.
  exclude '**/*Suite.class'

  if (shouldUseJUnit5) {
    useJUnitPlatform {
      includeTags "integration"
    }
  } else {
    useJUnit {
      includeCategories 'org.apache.kafka.test.IntegrationTest'
    }
  }

  retry {
    maxRetries = userMaxTestRetries
    maxFailures = userMaxTestRetryFailures
  }
}
// Runs only unit tests, i.e. everything NOT tagged/categorized as integration
// (the complement of the integrationTest task). A pasted commit-message blob
// (git-blame residue) has been removed from this block; it was not valid Groovy.
task unitTest(type: Test, dependsOn: compileJava) {
  maxParallelForks = userMaxForks ?: Runtime.runtime.availableProcessors()
  ignoreFailures = userIgnoreFailures

  maxHeapSize = defaultMaxHeapSize
  jvmArgs = defaultJvmArgs

  testLogging {
    events = userTestLoggingEvents ?: testLoggingEvents
    showStandardStreams = userShowStandardStreams ?: testShowStandardStreams
    exceptionFormat = testExceptionFormat
  }
  logTestStdout.rehydrate(delegate, owner, this)()

  // The suites are for running sets of tests in IDEs.
  // Gradle will run each test class, so we exclude the suites to avoid redundantly running the tests twice.
  exclude '**/*Suite.class'

  if (shouldUseJUnit5) {
    useJUnitPlatform {
      excludeTags "integration"
    }
  } else {
    useJUnit {
      excludeCategories 'org.apache.kafka.test.IntegrationTest'
    }
  }

  retry {
    maxRetries = userMaxTestRetries
    maxFailures = userMaxTestRetryFailures
  }
}
// Archive tasks: every published jar bundles the LICENSE and NOTICE files.
jar {
  from "$rootDir/LICENSE"
  from "$rootDir/NOTICE"
}

task srcJar(type: Jar) {
  classifier = 'sources'
  from "$rootDir/LICENSE"
  from "$rootDir/NOTICE"
  from sourceSets.main.allSource
}

task javadocJar(type: Jar, dependsOn: javadoc) {
  // assignment form for consistency with the other archive tasks
  classifier = 'javadoc'
  from "$rootDir/LICENSE"
  from "$rootDir/NOTICE"
  from javadoc.destinationDir
}

task docsJar(dependsOn: javadocJar)

javadoc {
  options.charSet = 'UTF-8'
  options.docEncoding = 'UTF-8'
  options.encoding = 'UTF-8'
}

task systemTestLibs(dependsOn: jar)

artifacts {
  archives srcJar
  archives javadocJar
}
// Publish test jars only for modules that actually contain test sources.
if (!sourceSets.test.allSource.isEmpty()) {
  task testJar(type: Jar) {
    classifier = 'test'
    from "$rootDir/LICENSE"
    from "$rootDir/NOTICE"
    from sourceSets.test.output
  }

  task testSrcJar(type: Jar, dependsOn: testJar) {
    classifier = 'test-sources'
    from "$rootDir/LICENSE"
    from "$rootDir/NOTICE"
    from sourceSets.test.allSource
  }

  artifacts {
    archives testJar
    archives testSrcJar
  }
}
// Extra configuration for Scala subprojects: pin the zinc incremental
// compiler version and publish a scaladoc jar alongside the javadoc jar.
plugins.withType(ScalaPlugin) {
  scala {
    zincVersion = versions.zinc
  }

  task scaladocJar(type: Jar, dependsOn: scaladoc) {
    classifier = 'scaladoc'
    from "$rootDir/LICENSE"
    from "$rootDir/NOTICE"
    from scaladoc.destinationDir
  }

  // documentation task should also trigger building scala doc jar
  docsJar.dependsOn scaladocJar
}
// Common scalac settings. A pasted commit-message blob (git-blame residue)
// has been removed from this block; it was not valid Groovy.
tasks.withType(ScalaCompile) {
  scalaCompileOptions.additionalParameters = [
    "-deprecation",
    "-unchecked",
    "-encoding", "utf8",
    "-Xlog-reflective-calls",
    "-feature",
    "-language:postfixOps",
    "-language:implicitConversions",
    "-language:existentials",
    "-Xlint:constant",
    "-Xlint:delayedinit-select",
    "-Xlint:doc-detached",
    "-Xlint:missing-interpolator",
    "-Xlint:nullary-unit",
    "-Xlint:option-implicit",
    "-Xlint:package-object-classes",
    "-Xlint:poly-implicit-overload",
    "-Xlint:private-shadow",
    "-Xlint:stars-align",
    "-Xlint:type-parameter-shadow",
    "-Xlint:unused"
  ]

  // See README.md for details on this option and the meaning of each value
  if (userScalaOptimizerMode.equals("method"))
    scalaCompileOptions.additionalParameters += ["-opt:l:method"]
  else if (userScalaOptimizerMode.startsWith("inline-")) {
    List<String> inlineFrom = ["-opt-inline-from:org.apache.kafka.**"]
    if (project.name.equals('core'))
      inlineFrom.add("-opt-inline-from:kafka.**")
    if (userScalaOptimizerMode.equals("inline-scala"))
      inlineFrom.add("-opt-inline-from:scala.**")

    scalaCompileOptions.additionalParameters += ["-opt:l:inline"]
    scalaCompileOptions.additionalParameters += inlineFrom
  }

  if (versions.baseScala != '2.12') {
    scalaCompileOptions.additionalParameters += ["-opt-warnings", "-Xlint:strict-unsealed-patmat"]
    // Scala 2.13.2 introduces compiler warnings suppression, which is a pre-requisite for -Xfatal-warnings
    scalaCompileOptions.additionalParameters += ["-Xfatal-warnings"]
  }

  // these options are valid for Scala versions < 2.13 only
  // Scala 2.13 removes them, see https://github.com/scala/scala/pull/6502 and https://github.com/scala/scala/pull/5969
  if (versions.baseScala == '2.12') {
    scalaCompileOptions.additionalParameters += [
      "-Xlint:by-name-right-associative",
      "-Xlint:nullary-override",
      "-Xlint:unsound-match"
    ]
  }

  // Scalac's `-release` requires Java 9 or higher
  if (JavaVersion.current().isJava9Compatible())
    scalaCompileOptions.additionalParameters += ["-release", minJavaVersion]

  // The forked compiler JVM reuses the shared heap/GC settings from ext.
  configure(scalaCompileOptions.forkOptions) {
    memoryMaximumSize = defaultMaxHeapSize
    jvmArgs = defaultJvmArgs
  }
}
// Checkstyle runs on every Java source set and gates the test task.
checkstyle {
  configFile = new File(rootDir, "checkstyle/checkstyle.xml")
  configProperties = checkstyleConfigProperties("import-control.xml")
  toolVersion = versions.checkstyle
}

configure(checkstyleMain) {
  group = 'Verification'
  description = 'Run checkstyle on all main Java sources'
}

configure(checkstyleTest) {
  group = 'Verification'
  description = 'Run checkstyle on all test Java sources'
}

test.dependsOn('checkstyleMain', 'checkstyleTest')
// SpotBugs static analysis: fails the build on findings and gates the test task.
spotbugs {
  toolVersion = versions.spotbugs
  excludeFilter = file("$rootDir/gradle/spotbugs-exclude.xml")
  ignoreFailures = false
}
test.dependsOn('spotbugsMain')
// Configure every SpotBugs task: report format is XML when either the
// `xmlSpotBugsReport` or the legacy `xmlFindBugsReport` project property is set,
// HTML otherwise (the two report types are mutually exclusive).
tasks.withType(com.github.spotbugs.snom.SpotBugsTask) {
  reports {
    // Continue supporting `xmlFindBugsReport` for compatibility
    xml.enabled(project.hasProperty('xmlSpotBugsReport') || project.hasProperty('xmlFindBugsReport'))
    html.enabled(!project.hasProperty('xmlSpotBugsReport') && !project.hasProperty('xmlFindBugsReport'))
  }
  maxHeapSize = defaultMaxHeapSize
  jvmArgs = defaultJvmArgs
}
// Ignore core since it's a scala project: jacoco only instruments Java bytecode
// produced by the java plugin; core's coverage is handled by scoverage instead.
if (it.path != ':core') {
  if (userEnableTestCoverage) {
    apply plugin: "jacoco"
    jacoco {
      toolVersion = versions.jacoco
    }
    // NOTE: Jacoco Gradle plugin does not support "offline instrumentation" this means that classes mocked by PowerMock
    // may report 0 coverage, since the source was modified after initial instrumentation.
    // See https://github.com/jacoco/jacoco/issues/51
    jacocoTestReport {
      dependsOn tasks.test
      sourceSets sourceSets.main
      reports {
        html.enabled = true
        xml.enabled = true
        csv.enabled = false
      }
    }
  }
}

if (userEnableTestCoverage) {
  // Per-project coverage entry point: scoverage for the Scala core, jacoco elsewhere.
  def coverageGen = it.path == ':core' ? 'reportScoverage' : 'jacocoTestReport'
  task reportCoverage(dependsOn: [coverageGen])
}
// Resolve the current git commit id (truncated to 16 characters) without
// shelling out to git: read .git/HEAD directly and follow a symbolic ref
// when present. Falls back to "unknown" outside a git checkout.
task determineCommitId {
  def takeFromHash = 16
  if (commitId) {
    commitId = commitId.take(takeFromHash)
  } else if (file("$rootDir/.git/HEAD").exists()) {
    def headRef = file("$rootDir/.git/HEAD").text
    if (headRef.contains('ref: ')) {
      // HEAD points at a branch; read the commit hash from the ref file.
      headRef = headRef.replaceAll('ref: ', '').trim()
      if (file("$rootDir/.git/$headRef").exists()) {
        commitId = file("$rootDir/.git/$headRef").text.trim().take(takeFromHash)
      }
    } else {
      // Detached HEAD: the file contains the commit hash itself.
      commitId = headRef.trim().take(takeFromHash)
    }
  } else {
    commitId = "unknown"
  }
}
}
// Disable SpotBugs on scoverage-instrumented classes and on test classes;
// only production classes are analyzed.
gradle.taskGraph.whenReady { taskGraph ->
  taskGraph.getAllTasks().findAll { it.name.contains('spotbugsScoverage') || it.name.contains('spotbugsTest') }.each { task ->
    task.enabled = false
  }
}
// Adjust the Eclipse .classpath generated for a project: split core's
// test/scala sub-directories into separate source folders, redirect build
// output to build_eclipse, and drop redundant test-output lib entries.
def fineTuneEclipseClasspathFile(eclipse, project) {
  eclipse.classpath.file {
    beforeMerged { cp ->
      cp.entries.clear()
      // for the core project add the directories defined under test/scala as separate source directories
      if (project.name.equals('core')) {
        cp.entries.add(new org.gradle.plugins.ide.eclipse.model.SourceFolder("src/test/scala/integration", null))
        cp.entries.add(new org.gradle.plugins.ide.eclipse.model.SourceFolder("src/test/scala/other", null))
        cp.entries.add(new org.gradle.plugins.ide.eclipse.model.SourceFolder("src/test/scala/unit", null))
      }
    }
    whenMerged { cp ->
      // for the core project exclude the separate sub-directories defined under test/scala. These are added as source dirs above
      if (project.name.equals('core')) {
        cp.entries.findAll { it.kind == "src" && it.path.equals("src/test/scala") }*.excludes = ["integration/", "other/", "unit/"]
      }
      /*
       * Set all eclipse build output to go to 'build_eclipse' directory. This is to ensure that gradle and eclipse use different
       * build output directories, and also avoid using the eclipse default of 'bin' which clashes with some of our script directories.
       * https://discuss.gradle.org/t/eclipse-generated-files-should-be-put-in-the-same-place-as-the-gradle-generated-files/6986/2
       */
      cp.entries.findAll { it.kind == "output" }*.path = "build_eclipse"
      /*
       * Some projects have explicitly added test output dependencies. These are required for the gradle build but not required
       * in Eclipse since the dependent projects are added as dependencies. So clean up these from the generated classpath.
       */
      cp.entries.removeAll { it.kind == "lib" && it.path.matches(".*/build/(classes|resources)/test") }
    }
  }
}
// Property map handed to checkstyle; every path is rooted at the repository's
// checkstyle/ directory, with the import-control file varying per caller.
def checkstyleConfigProperties(configFileName) {
  [importControlFile: "$rootDir/checkstyle/$configFileName",
   suppressionsFile: "$rootDir/checkstyle/suppressions.xml",
   headerFile: "$rootDir/checkstyle/java.header"]
}
// Aggregates all jacoco results into the root project directory
if (userEnableTestCoverage) {
  task jacocoRootReport(type: org.gradle.testing.jacoco.tasks.JacocoReport) {
    // Core is Scala and covered by scoverage, so it is excluded here.
    def javaProjects = subprojects.findAll { it.path != ':core' }
    description = 'Generates an aggregate report from all subprojects'
    dependsOn(javaProjects.test)
    additionalSourceDirs.from = javaProjects.sourceSets.main.allSource.srcDirs
    sourceDirectories.from = javaProjects.sourceSets.main.allSource.srcDirs
    classDirectories.from = javaProjects.sourceSets.main.output
    executionData.from = javaProjects.jacocoTestReport.executionData
    reports {
      html.enabled = true
      xml.enabled = true
    }
    // workaround to ignore projects that don't have any tests at all
    onlyIf = { true }
    doFirst {
      executionData = files(executionData.findAll { it.exists() })
    }
  }
}

if (userEnableTestCoverage) {
  // Root-level coverage entry point: jacoco aggregate plus core's scoverage report.
  task reportCoverage(dependsOn: ['jacocoRootReport', 'core:reportCoverage'])
}
// All Kafka Connect sub-projects, used to build and test them as a group.
def connectPkgs = [
  'connect:api',
  'connect:basic-auth-extension',
  'connect:file',
  'connect:json',
  'connect:runtime',
  'connect:transforms',
  'connect:mirror',
  'connect:mirror-client'
]

// Aggregate tasks: build (or test) every Connect module with one invocation.
tasks.create(name: "jarConnect", dependsOn: connectPkgs.collect { it + ":jar" }) {}
tasks.create(name: "testConnect", dependsOn: connectPkgs.collect { it + ":test" }) {}
// The `core` module: the Scala broker implementation plus doc-generation and
// release-packaging tasks.
project(':core') {
  println "Building project 'core' with Scala version ${versions.scala}"
  apply plugin: 'scala'

  // scaladoc generation is configured at the sub-module level with an artifacts
  // block (cf. see streams-scala). If scaladoc generation is invoked explicitly
  // for the `core` module, this ensures the generated jar doesn't include scaladoc
  // files since the `core` module doesn't include public APIs.
  scaladoc {
    enabled = false
  }

  if (userEnableTestCoverage)
    apply plugin: "org.scoverage"
  archivesBaseName = "kafka_${versions.baseScala}"

  dependencies {
    compile project(':clients')
    compile project(':metadata')
    compile project(':raft')

    compile libs.argparse4j
    compile libs.jacksonDatabind
    compile libs.jacksonModuleScala
    compile libs.jacksonDataformatCsv
    compile libs.jacksonJDK8Datatypes
    compile libs.joptSimple
    compile libs.metrics
    compile libs.scalaCollectionCompat
    compile libs.scalaJava8Compat
    compile libs.scalaLibrary
    // only needed transitively, but set it explicitly to ensure it has the same version as scala-library
    compile libs.scalaReflect
    compile libs.scalaLogging
    compile libs.slf4jApi
    compile(libs.zookeeper) {
      exclude module: 'slf4j-log4j12'
      exclude module: 'log4j'
    }
    // ZooKeeperMain depends on commons-cli but declares the dependency as `provided`
    compile libs.commonsCli

    compileOnly libs.log4j

    testCompile project(':clients').sourceSets.test.output
    testCompile libs.bcpkix
    testCompile libs.mockitoCore
    testCompile libs.easymock
    testCompile(libs.apacheda) {
      exclude group: 'xml-apis', module: 'xml-apis'
      // `mina-core` is a transitive dependency for `apacheds` and `apacheda`.
      // It is safer to use from `apacheds` since that is the implementation.
      exclude module: 'mina-core'
    }
    testCompile libs.apachedsCoreApi
    testCompile libs.apachedsInterceptorKerberos
    testCompile libs.apachedsProtocolShared
    testCompile libs.apachedsProtocolKerberos
    testCompile libs.apachedsProtocolLdap
    testCompile libs.apachedsLdifPartition
    testCompile libs.apachedsMavibotPartition
    testCompile libs.apachedsJdbmPartition
    testCompile libs.junitJupiter
    testCompile libs.slf4jlog4j
    testCompile(libs.jfreechart) {
      exclude group: 'junit', module: 'junit'
    }
  }

  if (userEnableTestCoverage) {
    scoverage {
      scoverageVersion = versions.scoverage
      reportDir = file("${rootProject.buildDir}/scoverage")
      highlighting = false
      minimumRate = 0.0
    }
  }

  configurations {
    // manually excludes some unnecessary dependencies
    compile.exclude module: 'javax'
    compile.exclude module: 'jline'
    compile.exclude module: 'jms'
    compile.exclude module: 'jmxri'
    compile.exclude module: 'jmxtools'
    compile.exclude module: 'mail'
    // To prevent a UniqueResourceException due the same resource existing in both
    // org.apache.directory.api/api-all and org.apache.directory.api/api-ldap-schema-data
    testCompile.exclude module: 'api-ldap-schema-data'
  }

  tasks.create(name: "copyDependantLibs", type: Copy) {
    from (configurations.testRuntime) {
      include('slf4j-log4j12*')
      include('log4j*jar')
    }
    from (configurations.runtime) {
      exclude('kafka-clients*')
    }
    into "$buildDir/dependant-libs-${versions.scala}"
    duplicatesStrategy 'exclude'
  }

  // Generate the internal message classes from the JSON schemas before compiling.
  task processMessages(type: JavaExec) {
    main = "org.apache.kafka.message.MessageGenerator"
    classpath = project(':generator').sourceSets.main.runtimeClasspath
    args = [ "-p", "kafka.internals.generated",
             "-o", "src/generated/java/kafka/internals/generated",
             "-i", "src/main/resources/common/message",
             "-m", "MessageDataGenerator"
           ]
    inputs.dir("src/main/resources/common/message")
    outputs.dir("src/generated/java/kafka/internals/generated")
  }

  compileJava.dependsOn 'processMessages'

  // The gen*Docs tasks below each run a class whose main() prints an HTML
  // fragment of documentation, captured into generatedDocsDir.
  task genProtocolErrorDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'org.apache.kafka.common.protocol.Errors'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "protocol_errors.html").newOutputStream()
  }

  task genProtocolTypesDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'org.apache.kafka.common.protocol.types.Type'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "protocol_types.html").newOutputStream()
  }

  task genProtocolApiKeyDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'org.apache.kafka.common.protocol.ApiKeys'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "protocol_api_keys.html").newOutputStream()
  }

  task genProtocolMessageDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'org.apache.kafka.common.protocol.Protocol'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "protocol_messages.html").newOutputStream()
  }

  task genAdminClientConfigDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'org.apache.kafka.clients.admin.AdminClientConfig'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "admin_client_config.html").newOutputStream()
  }

  task genProducerConfigDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'org.apache.kafka.clients.producer.ProducerConfig'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "producer_config.html").newOutputStream()
  }

  task genConsumerConfigDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'org.apache.kafka.clients.consumer.ConsumerConfig'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "consumer_config.html").newOutputStream()
  }

  task genKafkaConfigDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'kafka.server.KafkaConfig'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "kafka_config.html").newOutputStream()
  }

  task genTopicConfigDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'kafka.log.LogConfig'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "topic_config.html").newOutputStream()
  }

  task genConsumerMetricsDocs(type: JavaExec) {
    classpath = sourceSets.test.runtimeClasspath
    main = 'org.apache.kafka.clients.consumer.internals.ConsumerMetrics'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "consumer_metrics.html").newOutputStream()
  }

  task genProducerMetricsDocs(type: JavaExec) {
    classpath = sourceSets.test.runtimeClasspath
    main = 'org.apache.kafka.clients.producer.internals.ProducerMetrics'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "producer_metrics.html").newOutputStream()
  }

  // Bundle the docs/ tree plus every generated HTML fragment into the
  // site-docs tarball shipped with a release.
  task siteDocsTar(dependsOn: ['genProtocolErrorDocs', 'genProtocolTypesDocs', 'genProtocolApiKeyDocs', 'genProtocolMessageDocs',
                               'genAdminClientConfigDocs', 'genProducerConfigDocs', 'genConsumerConfigDocs',
                               'genKafkaConfigDocs', 'genTopicConfigDocs',
                               ':connect:runtime:genConnectConfigDocs', ':connect:runtime:genConnectTransformationDocs',
                               ':connect:runtime:genConnectPredicateDocs',
                               ':connect:runtime:genSinkConnectorConfigDocs', ':connect:runtime:genSourceConnectorConfigDocs',
                               ':streams:genStreamsConfigDocs', 'genConsumerMetricsDocs', 'genProducerMetricsDocs',
                               ':connect:runtime:genConnectMetricsDocs'], type: Tar) {
    classifier = 'site-docs'
    compression = Compression.GZIP
    from project.file("$rootDir/docs")
    into 'site-docs'
    duplicatesStrategy 'exclude'
  }

  // Assemble the binary release tarball: scripts, configs, licenses, site docs,
  // and the jars (plus runtime deps) of every shipped module.
  tasks.create(name: "releaseTarGz", dependsOn: configurations.archives.artifacts, type: Tar) {
    into "kafka_${versions.baseScala}-${version}"
    compression = Compression.GZIP
    from(project.file("$rootDir/bin")) { into "bin/" }
    from(project.file("$rootDir/config")) { into "config/" }
    from "$rootDir/LICENSE"
    from "$rootDir/NOTICE"
    from(configurations.runtime) { into("libs/") }
    from(configurations.archives.artifacts.files) { into("libs/") }
    from(project.siteDocsTar) { into("site-docs/") }
    from(project(':tools').jar) { into("libs/") }
    from(project(':tools').configurations.runtime) { into("libs/") }
    from(project(':connect:api').jar) { into("libs/") }
    from(project(':connect:api').configurations.runtime) { into("libs/") }
    from(project(':connect:runtime').jar) { into("libs/") }
    from(project(':connect:runtime').configurations.runtime) { into("libs/") }
    from(project(':connect:transforms').jar) { into("libs/") }
    from(project(':connect:transforms').configurations.runtime) { into("libs/") }
    from(project(':connect:json').jar) { into("libs/") }
    from(project(':connect:json').configurations.runtime) { into("libs/") }
    from(project(':connect:file').jar) { into("libs/") }
    from(project(':connect:file').configurations.runtime) { into("libs/") }
    from(project(':connect:basic-auth-extension').jar) { into("libs/") }
    from(project(':connect:basic-auth-extension').configurations.runtime) { into("libs/") }
    from(project(':connect:mirror').jar) { into("libs/") }
    from(project(':connect:mirror').configurations.runtime) { into("libs/") }
    from(project(':connect:mirror-client').jar) { into("libs/") }
    from(project(':connect:mirror-client').configurations.runtime) { into("libs/") }
    from(project(':streams').jar) { into("libs/") }
    from(project(':streams').configurations.runtime) { into("libs/") }
    from(project(':streams:streams-scala').jar) { into("libs/") }
    from(project(':streams:streams-scala').configurations.runtime) { into("libs/") }
    from(project(':streams:test-utils').jar) { into("libs/") }
    from(project(':streams:test-utils').configurations.runtime) { into("libs/") }
    from(project(':streams:examples').jar) { into("libs/") }
    from(project(':streams:examples').configurations.runtime) { into("libs/") }
    duplicatesStrategy 'exclude'
  }

  jar {
    dependsOn('copyDependantLibs')
  }

  jar.manifest {
    attributes(
      'Version': "${version}"
    )
  }

  tasks.create(name: "copyDependantTestLibs", type: Copy) {
    from (configurations.testRuntime) {
      include('*.jar')
    }
    into "$buildDir/dependant-testlibs"
    //By default gradle does not handle test dependencies between the sub-projects
    //This line is to include clients project test jar to dependant-testlibs
    from (project(':clients').testJar ) { "$buildDir/dependant-testlibs" }
    duplicatesStrategy 'exclude'
  }

  systemTestLibs.dependsOn('jar', 'testJar', 'copyDependantTestLibs')

  checkstyle {
    configProperties = checkstyleConfigProperties("import-control-core.xml")
  }

  sourceSets {
    // Generated message classes are compiled alongside the hand-written Java.
    main {
      java {
        srcDirs = ["src/generated/java", "src/main/java"]
      }
    }
    test {
      java {
        srcDirs = []
      }
      // Java test sources are compiled by the Scala compiler for joint compilation.
      scala {
        srcDirs = ["src/test/java", "src/test/scala"]
      }
    }
  }
}
// The `metadata` module: KIP-500 quorum-controller metadata handling.
project(':metadata') {
  archivesBaseName = "kafka-metadata"

  dependencies {
    compile project(':clients')
    compile libs.jacksonDatabind
    compile libs.jacksonJDK8Datatypes
    compile libs.metrics
    compileOnly libs.log4j
    testCompile libs.junitJupiter
    testCompile libs.hamcrest
    testCompile libs.slf4jlog4j
    testCompile project(':clients').sourceSets.test.output
  }

  // Generate metadata record classes (plus JSON converters and record-type
  // registries) from the JSON schemas before compiling.
  task processMessages(type: JavaExec) {
    main = "org.apache.kafka.message.MessageGenerator"
    classpath = project(':generator').sourceSets.main.runtimeClasspath
    args = [ "-p", "org.apache.kafka.common.metadata",
             "-o", "src/generated/java/org/apache/kafka/common/metadata",
             "-i", "src/main/resources/common/metadata",
             "-m", "MessageDataGenerator", "JsonConverterGenerator",
             "-t", "MetadataRecordTypeGenerator", "MetadataJsonConvertersGenerator"
           ]
    inputs.dir("src/main/resources/common/metadata")
    outputs.dir("src/generated/java/org/apache/kafka/common/metadata")
  }

  compileJava.dependsOn 'processMessages'

  sourceSets {
    main {
      java {
        srcDirs = ["src/generated/java", "src/main/java"]
      }
    }
    test {
      java {
        srcDirs = ["src/generated/java", "src/test/java"]
      }
    }
  }

  javadoc {
    enabled = false
  }
}
// The `examples` module: sample client/broker code; no published javadoc.
project(':examples') {
  archivesBaseName = "kafka-examples"

  dependencies {
    compile project(':core')
  }

  javadoc {
    enabled = false
  }

  checkstyle {
    configProperties = checkstyleConfigProperties("import-control-core.xml")
  }
}
// The `generator` module: the message-class code generator used by the
// processMessages tasks of other modules; not part of the public API.
project(':generator') {
  dependencies {
    compile libs.argparse4j
    compile libs.jacksonDatabind
    compile libs.jacksonJDK8Datatypes
    compile libs.jacksonJaxrsJsonProvider
    testCompile libs.junitJupiter
  }

  javadoc {
    enabled = false
  }
}
// The `clients` module: the public Kafka client library.
project(':clients') {
  archivesBaseName = "kafka-clients"

  configurations {
    jacksonDatabindConfig
  }

  // add jacksonDatabindConfig as provided scope config with high priority (1000)
  conf2ScopeMappings.addMapping(1000, configurations.jacksonDatabindConfig, "provided")

  dependencies {
    compile libs.zstd
    compile libs.lz4
    compile libs.snappy
    compile libs.slf4jApi

    compileOnly libs.jacksonDatabind // for SASL/OAUTHBEARER bearer token parsing
    compileOnly libs.jacksonJDK8Datatypes

    jacksonDatabindConfig libs.jacksonDatabind // to publish as provided scope dependency.

    testCompile libs.bcpkix
    testCompile libs.junitJupiter
    testCompile libs.mockitoCore

    testRuntime libs.slf4jlog4j
    testRuntime libs.jacksonDatabind
    testRuntime libs.jacksonJDK8Datatypes
    testCompile libs.jacksonJaxrsJsonProvider
  }

  // Write a small properties file recording the build's version and commit id;
  // regenerated on every build and bundled into the jar.
  task createVersionFile(dependsOn: determineCommitId) {
    ext.receiptFile = file("$buildDir/kafka/$buildVersionFileName")
    outputs.file receiptFile
    outputs.upToDateWhen { false }
    doLast {
      def data = [
        commitId: commitId,
        version: version,
      ]

      receiptFile.parentFile.mkdirs()
      def content = data.entrySet().collect { "$it.key=$it.value" }.sort().join("\n")
      receiptFile.setText(content, "ISO-8859-1")
    }
  }

  jar {
    dependsOn createVersionFile
    from("$buildDir") {
      include "kafka/$buildVersionFileName"
    }
  }

  clean.doFirst {
    delete "$buildDir/kafka/"
  }

  // Generate the protocol message classes (and ApiMessageType) from the
  // main JSON schemas.
  task processMessages(type: JavaExec) {
    main = "org.apache.kafka.message.MessageGenerator"
    classpath = project(':generator').sourceSets.main.runtimeClasspath
    args = [ "-p", "org.apache.kafka.common.message",
             "-o", "src/generated/java/org/apache/kafka/common/message",
             "-i", "src/main/resources/common/message",
             "-t", "ApiMessageTypeGenerator",
             "-m", "MessageDataGenerator", "JsonConverterGenerator"
           ]
    inputs.dir("src/main/resources/common/message")
    outputs.dir("src/generated/java/org/apache/kafka/common/message")
  }

  // Same generator, but for test-only message schemas.
  task processTestMessages(type: JavaExec) {
    main = "org.apache.kafka.message.MessageGenerator"
    classpath = project(':generator').sourceSets.main.runtimeClasspath
    args = [ "-p", "org.apache.kafka.common.message",
             "-o", "src/generated-test/java/org/apache/kafka/common/message",
             "-i", "src/test/resources/common/message",
             "-m", "MessageDataGenerator", "JsonConverterGenerator"
           ]
    inputs.dir("src/test/resources/common/message")
    outputs.dir("src/generated-test/java/org/apache/kafka/common/message")
  }

  sourceSets {
    main {
      java {
        srcDirs = ["src/generated/java", "src/main/java"]
      }
    }
    test {
      java {
        srcDirs = ["src/generated/java", "src/generated-test/java", "src/test/java"]
      }
    }
  }

  compileJava.dependsOn 'processMessages'
  compileTestJava.dependsOn 'processTestMessages'

  // Only the packages below form the public javadoc of kafka-clients.
  javadoc {
    include "**/org/apache/kafka/clients/admin/*"
    include "**/org/apache/kafka/clients/consumer/*"
    include "**/org/apache/kafka/clients/producer/*"
    include "**/org/apache/kafka/common/*"
    include "**/org/apache/kafka/common/acl/*"
    include "**/org/apache/kafka/common/annotation/*"
    include "**/org/apache/kafka/common/errors/*"
    include "**/org/apache/kafka/common/header/*"
    include "**/org/apache/kafka/common/metrics/*"
    include "**/org/apache/kafka/common/metrics/stats/*"
    include "**/org/apache/kafka/common/quota/*"
    include "**/org/apache/kafka/common/resource/*"
    include "**/org/apache/kafka/common/serialization/*"
    include "**/org/apache/kafka/common/config/*"
    include "**/org/apache/kafka/common/config/provider/*"
    include "**/org/apache/kafka/common/security/auth/*"
    include "**/org/apache/kafka/common/security/plain/*"
    include "**/org/apache/kafka/common/security/scram/*"
    include "**/org/apache/kafka/common/security/token/delegation/*"
    include "**/org/apache/kafka/common/security/oauthbearer/*"
    include "**/org/apache/kafka/server/authorizer/*"
    include "**/org/apache/kafka/server/policy/*"
    include "**/org/apache/kafka/server/quota/*"
    include "**/org/apache/kafka/server/log/remote/storage/*"
  }
}
// The `raft` module: the KRaft consensus implementation.
project(':raft') {
  archivesBaseName = "kafka-raft"

  dependencies {
    compile project(':clients')
    compile project(':metadata')
    compile libs.slf4jApi
    compile libs.jacksonDatabind

    testCompile project(':clients')
    testCompile project(':clients').sourceSets.test.output
    testCompile libs.junitJupiter
    testCompile libs.mockitoCore

    testRuntime libs.slf4jlog4j
  }

  // Write a small properties file recording the build's version and commit id;
  // regenerated on every build and bundled into the jar.
  task createVersionFile(dependsOn: determineCommitId) {
    ext.receiptFile = file("$buildDir/kafka/$buildVersionFileName")
    outputs.file receiptFile
    outputs.upToDateWhen { false }
    doLast {
      def data = [
        commitId: commitId,
        version: version,
      ]

      receiptFile.parentFile.mkdirs()
      def content = data.entrySet().collect { "$it.key=$it.value" }.sort().join("\n")
      receiptFile.setText(content, "ISO-8859-1")
    }
  }

  // Generate the raft protocol message classes from the JSON schemas.
  task processMessages(type: JavaExec) {
    main = "org.apache.kafka.message.MessageGenerator"
    classpath = project(':generator').sourceSets.main.runtimeClasspath
    args = [ "-p", "org.apache.kafka.raft.generated",
             "-o", "src/generated/java/org/apache/kafka/raft/generated",
             "-i", "src/main/resources/common/message",
             "-m", "MessageDataGenerator", "JsonConverterGenerator" ]
    inputs.dir("src/main/resources/common/message")
    outputs.dir("src/generated/java/org/apache/kafka/raft/generated")
  }

  sourceSets {
    main {
      java {
        srcDirs = ["src/generated/java", "src/main/java"]
      }
    }
    test {
      java {
        srcDirs = ["src/generated/java", "src/test/java"]
      }
    }
  }

  compileJava.dependsOn 'processMessages'

  jar {
    dependsOn createVersionFile
    from("$buildDir") {
      include "kafka/$buildVersionFileName"
    }
  }

  clean.doFirst {
    delete "$buildDir/kafka/"
  }

  javadoc {
    enabled = false
  }
}
// Build configuration for the command-line tools module.
project ( ':tools' ) {
archivesBaseName = "kafka-tools"
dependencies {
compile project ( ':clients' )
compile project ( ':log4j-appender' )
compile libs . argparse4j
compile libs . jacksonDatabind
compile libs . jacksonJDK8Datatypes
compile libs . slf4jApi
compile libs . jacksonJaxrsJsonProvider
compile libs . jerseyContainerServlet
compile libs . jerseyHk2
compile libs . jaxbApi // Jersey dependency that was available in the JDK before Java 9
compile libs . activation // Jersey dependency that was available in the JDK before Java 9
compile libs . jettyServer
compile libs . jettyServlet
compile libs . jettyServlets
testCompile project ( ':clients' )
testCompile libs . junitJupiter
// Reuse test utilities from the clients module's test source set.
testCompile project ( ':clients' ) . sourceSets . test . output
testCompile libs . mockitoInline // supports mocking static methods, final classes, etc.
testRuntime libs . slf4jlog4j
}
// Internal module: no public javadoc is published.
javadoc {
enabled = false
}
// Copies logging jars and runtime deps (minus kafka-clients) next to the build output.
tasks . create ( name: "copyDependantLibs" , type: Copy ) {
from ( configurations . testRuntime ) {
include ( 'slf4j-log4j12*' )
include ( 'log4j*jar' )
}
from ( configurations . runtime ) {
exclude ( 'kafka-clients*' )
}
into "$buildDir/dependant-libs-${versions.scala}"
duplicatesStrategy 'exclude'
}
jar {
dependsOn 'copyDependantLibs'
}
}
// Build configuration for the interactive shell module.
project ( ':shell' ) {
archivesBaseName = "kafka-shell"
dependencies {
compile libs . argparse4j
compile libs . jacksonDatabind
compile libs . jacksonJDK8Datatypes
compile libs . jline
compile libs . slf4jApi
compile project ( ':clients' )
compile project ( ':core' )
compile project ( ':log4j-appender' )
compile project ( ':metadata' )
compile project ( ':raft' )
compile libs . jacksonJaxrsJsonProvider
testCompile project ( ':clients' )
testCompile libs . junitJupiter
testRuntime libs . slf4jlog4j
}
// Internal module: no public javadoc is published.
javadoc {
enabled = false
}
// Copies the jline jar next to the build output for packaging.
tasks . create ( name: "copyDependantLibs" , type: Copy ) {
from ( configurations . testRuntime ) {
include ( 'jline-*jar' )
}
from ( configurations . runtime ) {
include ( 'jline-*jar' )
}
into "$buildDir/dependant-libs-${versions.scala}"
duplicatesStrategy 'exclude'
}
jar {
dependsOn 'copyDependantLibs'
}
}
// Build configuration for the Kafka Streams module.
// NOTE(review): stray git commit-message text that had been pasted into this block
// (not valid Groovy) was removed; the build logic itself is unchanged.
project(':streams') {
  archivesBaseName = "kafka-streams"
  ext.buildStreamsVersionFileName = "kafka-streams-version.properties"

  dependencies {
    compile project(':clients')
    // this dependency should be removed after we unify data API
    compile(project(':connect:json')) {
      // this transitive dependency is not used in Streams, and it breaks SBT builds
      exclude module: 'javax.ws.rs-api'
    }
    compile libs.slf4jApi
    compile libs.rocksDBJni
    // testCompileOnly prevents streams from exporting a dependency on test-utils, which would cause a dependency cycle
    testCompileOnly project(':streams:test-utils')
    testCompile project(':clients').sourceSets.test.output
    testCompile project(':core')
    testCompile project(':core').sourceSets.test.output
    testCompile libs.log4j
    testCompile libs.junitJupiterApi
    testCompile libs.junitVintageEngine
    testCompile libs.easymock
    testCompile libs.powermockJunit4
    testCompile libs.powermockEasymock
    testCompile libs.bcpkix
    testCompile libs.hamcrest
    testRuntimeOnly project(':streams:test-utils')
    testRuntime libs.slf4jlog4j
  }

  // Generates Java message classes from the JSON schemas in src/main/resources/common/message.
  task processMessages(type: JavaExec) {
    main = "org.apache.kafka.message.MessageGenerator"
    classpath = project(':generator').sourceSets.main.runtimeClasspath
    args = [ "-p", "org.apache.kafka.streams.internals.generated",
             "-o", "src/generated/java/org/apache/kafka/streams/internals/generated",
             "-i", "src/main/resources/common/message",
             "-m", "MessageDataGenerator"
    ]
    inputs.dir("src/main/resources/common/message")
    outputs.dir("src/generated/java/org/apache/kafka/streams/internals/generated")
  }

  // Generated sources live alongside hand-written ones in both source sets.
  sourceSets {
    main {
      java {
        srcDirs = [ "src/generated/java", "src/main/java" ]
      }
    }
    test {
      java {
        srcDirs = [ "src/generated/java", "src/test/java" ]
      }
    }
  }

  compileJava.dependsOn 'processMessages'

  // Publish javadoc for the public Streams API only; internals are excluded.
  javadoc {
    include "**/org/apache/kafka/streams/**"
    exclude "**/internals/**"
  }

  // Copies logging/test jars and runtime deps (minus kafka-clients) next to the build output.
  tasks.create(name: "copyDependantLibs", type: Copy) {
    from(configurations.testRuntime) {
      include('slf4j-log4j12*')
      include('log4j*jar')
      include('*hamcrest*')
    }
    from(configurations.runtime) {
      exclude('kafka-clients*')
    }
    into "$buildDir/dependant-libs-${versions.scala}"
    duplicatesStrategy 'exclude'
  }

  // Writes commitId/version properties into the build dir so the jar can embed them.
  task createStreamsVersionFile(dependsOn: determineCommitId) {
    ext.receiptFile = file("$buildDir/kafka/$buildStreamsVersionFileName")
    outputs.file receiptFile
    // Never considered up to date: the commit id can change without any input changing.
    outputs.upToDateWhen { false }
    doLast {
      def data = [
        commitId: commitId,
        version: version,
      ]
      receiptFile.parentFile.mkdirs()
      def content = data.entrySet().collect { "$it.key=$it.value" }.sort().join("\n")
      receiptFile.setText(content, "ISO-8859-1")
    }
  }

  jar {
    dependsOn 'createStreamsVersionFile'
    from("$buildDir") {
      include "kafka/$buildStreamsVersionFileName"
    }
    dependsOn 'copyDependantLibs'
  }

  systemTestLibs {
    dependsOn testJar
  }

  // Renders the Streams config documentation to the generated docs directory.
  task genStreamsConfigDocs(type: JavaExec) {
    classpath = sourceSets.main.runtimeClasspath
    main = 'org.apache.kafka.streams.StreamsConfig'
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "streams_config.html").newOutputStream()
  }

  // Aggregate task running the tests of every streams submodule.
  task testAll(
    dependsOn: [
      ':streams:test',
      ':streams:test-utils:test',
      ':streams:streams-scala:test',
      ':streams:upgrade-system-tests-0100:test',
      ':streams:upgrade-system-tests-0101:test',
      ':streams:upgrade-system-tests-0102:test',
      ':streams:upgrade-system-tests-0110:test',
      ':streams:upgrade-system-tests-10:test',
      ':streams:upgrade-system-tests-11:test',
      ':streams:upgrade-system-tests-20:test',
      ':streams:upgrade-system-tests-21:test',
      ':streams:upgrade-system-tests-22:test',
      ':streams:upgrade-system-tests-23:test',
      ':streams:upgrade-system-tests-24:test',
      ':streams:upgrade-system-tests-25:test',
      ':streams:upgrade-system-tests-26:test',
      ':streams:upgrade-system-tests-27:test',
      ':streams:examples:test'
    ]
  )
}
// Build configuration for the Scala API wrapper around Kafka Streams.
// NOTE(review): stray git commit-message text that had been pasted into the
// dependencies block (not valid Groovy) was removed; the build logic is unchanged.
project(':streams:streams-scala') {
  println "Building project 'streams-scala' with Scala version ${versions.scala}"
  apply plugin: 'scala'
  archivesBaseName = "kafka-streams-scala_${versions.baseScala}"

  dependencies {
    compile project(':streams')
    compile libs.scalaLibrary
    compile libs.scalaCollectionCompat

    testCompile project(':core')
    // Reuse test utilities from sibling modules' test source sets.
    testCompile project(':core').sourceSets.test.output
    testCompile project(':streams').sourceSets.test.output
    testCompile project(':clients').sourceSets.test.output
    testCompile project(':streams:test-utils')

    testCompile libs.junitJupiterApi
    testCompile libs.junitVintageEngine
    testCompile libs.scalatest
    testCompile libs.easymock
    testCompile libs.hamcrest
    testRuntime libs.slf4jlog4j
  }

  // Publish javadoc for the public Scala Streams API only.
  javadoc {
    include "**/org/apache/kafka/streams/scala/**"
  }

  // Copies runtime deps (minus kafka-streams) next to the build output.
  tasks.create(name: "copyDependantLibs", type: Copy) {
    from(configurations.runtime) {
      exclude('kafka-streams*')
    }
    into "$buildDir/dependant-libs-${versions.scala}"
    duplicatesStrategy 'exclude'
  }

  jar {
    dependsOn 'copyDependantLibs'
  }

  artifacts {
    archives scaladocJar
  }

  // Scala sources must pass the spotless formatting check before tests run.
  test.dependsOn(':spotlessScalaCheck')
}
// Build configuration for the Streams testing utilities module.
project ( ':streams:test-utils' ) {
archivesBaseName = "kafka-streams-test-utils"
dependencies {
compile project ( ':streams' )
compile project ( ':clients' )
// Reuse test utilities from the clients module's test source set.
testCompile project ( ':clients' ) . sourceSets . test . output
testCompile libs . junitJupiter
testCompile libs . easymock
testCompile libs . hamcrest
testRuntime libs . slf4jlog4j
}
// Publish javadoc for the public test API only; internals are excluded.
javadoc {
include "**/org/apache/kafka/streams/test/**"
exclude "**/internals/**"
}
// Copies runtime deps (minus kafka-streams) next to the build output.
tasks . create ( name: "copyDependantLibs" , type: Copy ) {
from ( configurations . runtime ) {
exclude ( 'kafka-streams*' )
}
into "$buildDir/dependant-libs-${versions.scala}"
duplicatesStrategy 'exclude'
}
jar {
dependsOn 'copyDependantLibs'
}
}
// Build configuration for the Streams examples module.
project ( ':streams:examples' ) {
archivesBaseName = "kafka-streams-examples"
dependencies {
compile project ( ':streams' )
compile project ( ':connect:json' ) // this dependency should be removed after we unify data API
compile libs . slf4jlog4j
testCompile project ( ':streams:test-utils' )
testCompile project ( ':clients' ) . sourceSets . test . output // for org.apache.kafka.test.IntegrationTest
testCompile libs . junitJupiter
testCompile libs . hamcrest
}
// Examples module: no public javadoc is published.
javadoc {
enabled = false
}
// Copies runtime deps (minus kafka-streams) next to the build output.
tasks . create ( name: "copyDependantLibs" , type: Copy ) {
from ( configurations . runtime ) {
exclude ( 'kafka-streams*' )
}
into "$buildDir/dependant-libs-${versions.scala}"
duplicatesStrategy 'exclude'
}
jar {
dependsOn 'copyDependantLibs'
}
}
// Each upgrade-system-tests-* module depends on an old kafka-streams release so
// that system tests can exercise upgrades/downgrades across versions.
project ( ':streams:upgrade-system-tests-0100' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-0100"
dependencies {
testCompile libs . kafkaStreams_0100
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-0101' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-0101"
dependencies {
testCompile libs . kafkaStreams_0101
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-0102' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-0102"
dependencies {
testCompile libs . kafkaStreams_0102
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-0110' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-0110"
dependencies {
testCompile libs . kafkaStreams_0110
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-10' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-10"
dependencies {
testCompile libs . kafkaStreams_10
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-11' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-11"
dependencies {
testCompile libs . kafkaStreams_11
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
/* NOTE(review): stray git commit-message text was embedded here (not valid Groovy);
   commented out to keep the build script parseable.
MINOR: Enable ignored upgrade system tests - trunk (#5605)
Removed ignore annotations from the upgrade tests. This PR includes the following changes for updating the upgrade tests:
* Uploaded new versions 0.10.2.2, 0.11.0.3, 1.0.2, 1.1.1, and 2.0.0 (in the associated scala versions) to kafka-packages
* Update versions in version.py, Dockerfile, base.sh
* Added new versions to StreamsUpgradeTest.test_upgrade_downgrade_brokers including version 2.0.0
* Added new versions StreamsUpgradeTest.test_simple_upgrade_downgrade test excluding version 2.0.0
* Version 2.0.0 is excluded from the streams upgrade/downgrade test as StreamsConfig needs an update for the new version, requiring a KIP. Once the community votes the KIP in, a minor follow-up PR can be pushed to add the 2.0.0 version to the upgrade test.
* Fixed minor bug in kafka-run-class.sh for classpath in upgrade/downgrade tests across versions.
* Follow on PRs for 0.10.2x, 0.11.0x, 1.0.x, 1.1.x, and 2.0.x will be pushed soon with the same updates required for the specific version.
Reviewers: Eno Thereska <eno.thereska@gmail.com>, John Roesler <vvcephei@users.noreply.github.com>, Guozhang Wang <wangguoz@gmail.com>, Matthias J. Sax <matthias@confluent.io>
6 years ago
*/
// Old kafka-streams 2.0 release used by upgrade/downgrade system tests.
// NOTE(review): stray git commit-message text that had been pasted into the
// dependencies block (not valid Groovy) was removed; the build logic is unchanged.
project(':streams:upgrade-system-tests-20') {
  archivesBaseName = "kafka-streams-upgrade-system-tests-20"
  dependencies {
    testCompile libs.kafkaStreams_20
    testRuntime libs.junitJupiter
  }
  systemTestLibs {
    dependsOn testJar
  }
}
// Each upgrade-system-tests-* module depends on an old kafka-streams release so
// that system tests can exercise upgrades/downgrades across versions.
project ( ':streams:upgrade-system-tests-21' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-21"
dependencies {
testCompile libs . kafkaStreams_21
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-22' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-22"
dependencies {
testCompile libs . kafkaStreams_22
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-23' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-23"
dependencies {
testCompile libs . kafkaStreams_23
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-24' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-24"
dependencies {
testCompile libs . kafkaStreams_24
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-25' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-25"
dependencies {
testCompile libs . kafkaStreams_25
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-26' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-26"
dependencies {
testCompile libs . kafkaStreams_26
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
project ( ':streams:upgrade-system-tests-27' ) {
archivesBaseName = "kafka-streams-upgrade-system-tests-27"
dependencies {
testCompile libs . kafkaStreams_27
testRuntime libs . junitJupiter
}
systemTestLibs {
dependsOn testJar
}
}
// Build configuration for the JMH micro-benchmarks module.
// NOTE(review): stray git commit-message text that had been pasted into this block
// (not valid Groovy) was removed, and the garbled shift operator `< <` was
// restored to `<<`; the build logic is otherwise unchanged.
project(':jmh-benchmarks') {
  apply plugin: 'com.github.johnrengelman.shadow'

  // Bundle the benchmarks and all their dependencies into one runnable jar.
  shadowJar {
    baseName = 'kafka-jmh-benchmarks-all'
    classifier = null
    version = null
  }

  dependencies {
    compile(project(':core')) {
      // jmh requires jopt 4.x while `core` depends on 5.0, they are not binary compatible
      exclude group: 'net.sf.jopt-simple', module: 'jopt-simple'
    }
    compile project(':clients')
    compile project(':metadata')
    compile project(':streams')
    compile project(':core')
    // Benchmarks also exercise test utilities from clients and core.
    compile project(':clients').sourceSets.test.output
    compile project(':core').sourceSets.test.output
    compile libs.jmhCore
    annotationProcessor libs.jmhGeneratorAnnProcess
    compile libs.jmhCoreBenchmarks
    compile libs.mockitoCore
    compile libs.slf4jlog4j
  }

  tasks.withType(JavaCompile) {
    // Suppress warning caused by code generated by jmh: `warning: [cast] redundant cast to long`
    options.compilerArgs << "-Xlint:-cast"
  }

  jar {
    manifest {
      attributes "Main-Class": "org.openjdk.jmh.Main"
    }
  }

  checkstyle {
    configProperties = checkstyleConfigProperties("import-control-jmh-benchmarks.xml")
  }

  // Runs the shadow jar; extra JMH arguments can be passed via -DjmhArgs="...".
  task jmh(type: JavaExec, dependsOn: [':jmh-benchmarks:clean', ':jmh-benchmarks:shadowJar']) {
    main = "-jar"
    doFirst {
      if (System.getProperty("jmhArgs")) {
        args System.getProperty("jmhArgs").split(' ')
      }
      args = [shadowJar.archivePath, *args]
    }
  }

  // Benchmarks module: no public javadoc is published.
  javadoc {
    enabled = false
  }
}
// Build configuration for the log4j appender module.
project ( ':log4j-appender' ) {
archivesBaseName = "kafka-log4j-appender"
dependencies {
compile project ( ':clients' )
compile libs . slf4jlog4j
// Reuse test utilities from the clients module's test source set.
testCompile project ( ':clients' ) . sourceSets . test . output
testCompile libs . junitJupiter
testCompile libs . hamcrest
testCompile libs . easymock
}
// Internal module: no public javadoc is published.
javadoc {
enabled = false
}
}
/* NOTE(review): stray git commit-message text was embedded here (not valid Groovy);
   commented out to keep the build script parseable.
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI speific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
*/
project ( ':connect:api' ) {
archivesBaseName = "connect-api"
/* NOTE(review): stray git commit-message text was embedded inside the
   project(':connect:api') block (not valid Groovy); commented out to keep the
   build script parseable. The block itself continues past this excerpt.
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI speific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
*/
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
// Dependencies for this Connect module.
// NOTE(review): git-blame/commit-message text that had been interleaved inside this
// block (and broke the Gradle syntax) has been removed; the declarations themselves
// are unchanged.
dependencies {
  compile project(':clients')
  compile libs.slf4jApi
  compile libs.jaxrsApi

  testCompile libs.junitJupiter
  testRuntime libs.slf4jlog4j
  // Reuse the clients module's compiled test classes (e.g. test utilities)
  // on this module's test classpath.
  testCompile project(':clients').sourceSets.test.output
}
// Javadoc configuration for this Connect module.
// NOTE(review): git-blame/commit-message text that had been interleaved inside this
// block (and broke the Gradle syntax) has been removed; the configuration itself
// is unchanged.
javadoc {
  include "**/org/apache/kafka/connect/**" // needed for the `aggregatedJavadoc` task
  // The JDK API doc URL structure was changed to include the locale after Java 8,
  // so link against the layout matching the JDK running the build.
  if (JavaVersion.current().isJava11Compatible())
    options.links "https://docs.oracle.com/en/java/javase/${JavaVersion.current().majorVersion}/docs/api/"
  else
    options.links "https://docs.oracle.com/javase/8/docs/api/"
}
// Copies the third-party jars this module needs at runtime into
// $buildDir/dependant-libs so they can be packaged alongside the module's jar.
tasks . create ( name: "copyDependantLibs" , type: Copy ) {
// From the test runtime classpath, take only the log4j binding and log4j itself.
from ( configurations . testRuntime ) {
include ( 'slf4j-log4j12*' )
include ( 'log4j*jar' )
}
// From the runtime classpath, skip jars that ship elsewhere in the distribution.
from ( configurations . runtime ) {
exclude ( 'kafka-clients*' )
exclude ( 'connect-*' )
}
into "$buildDir/dependant-libs"
// A jar matched by both `from` specs is copied only once.
duplicatesStrategy 'exclude'
}
// Ensure the dependant-libs directory is populated whenever this module's jar is built.
jar {
dependsOn copyDependantLibs
}
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI speific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
}
// Build configuration for the Connect transformations (SMT) module.
project ( ':connect:transforms' ) {
archivesBaseName = "connect-transforms"
dependencies {
compile project ( ':connect:api' )
compile libs . slf4jApi
testCompile libs . easymock
testCompile libs . junitJupiter
testRuntime libs . slf4jlog4j
// Reuse the clients module's compiled test classes on this module's test classpath.
testCompile project ( ':clients' ) . sourceSets . test . output
}
// No standalone javadoc for this module; its docs are covered by aggregatedJavadoc elsewhere.
javadoc {
enabled = false
}
// Copies the third-party jars this module needs at runtime into
// $buildDir/dependant-libs so they can be packaged alongside the module's jar.
tasks . create ( name: "copyDependantLibs" , type: Copy ) {
// From the test runtime classpath, take only the log4j binding and log4j itself.
from ( configurations . testRuntime ) {
include ( 'slf4j-log4j12*' )
include ( 'log4j*jar' )
}
// From the runtime classpath, skip jars that ship elsewhere in the distribution.
from ( configurations . runtime ) {
exclude ( 'kafka-clients*' )
exclude ( 'connect-*' )
}
into "$buildDir/dependant-libs"
// A jar matched by both `from` specs is copied only once.
duplicatesStrategy 'exclude'
}
// Ensure the dependant-libs directory is populated whenever this module's jar is built.
jar {
dependsOn copyDependantLibs
}
}
project ( ':connect:json' ) {
archivesBaseName = "connect-json"
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI specific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
// NOTE(review): this block had duplicated git-blame/commit-log text interleaved
// between its statements (a scraping artifact, repeated verbatim elsewhere in
// the file); that text is not Groovy and broke parsing, so it was removed.
dependencies {
  compile project(':connect:api')
  // Jackson provides the JSON (de)serialization used by this converter module.
  compile libs.jacksonDatabind
  compile libs.jacksonJDK8Datatypes
  compile libs.slf4jApi

  testCompile libs.easymock
  testCompile libs.junitJupiter
  testRuntime libs.slf4jlog4j
  // Reuse the test utilities shipped in the clients module's test sources.
  testCompile project(':clients').sourceSets.test.output
}
// NOTE(review): this block had duplicated git-blame/commit-log text interleaved
// between `enabled = false` and the closing brace (a scraping artifact, repeated
// verbatim elsewhere in the file); it is not Groovy and broke parsing, so it was
// removed. No public Javadoc is published for this module.
javadoc {
  enabled = false
}
// Gather runtime dependencies into build/dependant-libs for packaging.
tasks.create(name: "copyDependantLibs", type: Copy) {
  // The log4j binding jars are pulled from the test runtime configuration.
  from(configurations.testRuntime) {
    include('slf4j-log4j12*')
    include('log4j*jar')
  }
  // Kafka and Connect artifacts are excluded because they ship separately.
  from(configurations.runtime) {
    exclude('kafka-clients*')
    exclude('connect-*')
  }
  into "$buildDir/dependant-libs"
  duplicatesStrategy 'exclude'
}
// Building the jar also stages its third-party dependencies (see
// copyDependantLibs above).
jar {
dependsOn copyDependantLibs
}
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI specific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
}
// Build configuration for the :connect:runtime module; its archives are
// named with the "connect-runtime" base name.
project ( ':connect:runtime' ) {
archivesBaseName = "connect-runtime"
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI specific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
// Compile and test dependencies for the Connect runtime.
dependencies {
compile project ( ':connect:api' )
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI specific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
// Sibling Kafka modules this runtime builds on.
compile project ( ':clients' )
compile project ( ':tools' )
compile project ( ':connect:json' )
compile project ( ':connect:transforms' )
compile libs . slf4jApi
// REST API stack: Jackson JSON provider plus Jersey running on Jetty.
compile libs . jacksonJaxrsJsonProvider
compile libs . jerseyContainerServlet
compile libs . jerseyHk2
compile libs . jaxbApi // Jersey dependency that was available in the JDK before Java 9
compile libs . activation // Jersey dependency that was available in the JDK before Java 9
compile libs . jettyServer
compile libs . jettyServlet
compile libs . jettyServlets
compile libs . jettyClient
compile ( libs . reflections )
compile ( libs . mavenArtifact )
// Test-only dependencies; clients' test output provides shared test utilities.
testCompile project ( ':clients' ) . sourceSets . test . output
testCompile libs . easymock
testCompile libs . junitJupiterApi
testCompile libs . junitVintageEngine
testCompile libs . powermockJunit4
testCompile libs . powermockEasymock
testCompile libs . mockitoCore
testCompile libs . httpclient
// NOTE(review): a second, byte-identical declaration of
// "testCompile project(':clients').sourceSets.test.output" appeared here;
// it was an exact duplicate of the one above and has been removed.
testCompile project ( ':core' )
testCompile project ( ':core' ) . sourceSets . test . output
testRuntime libs . slf4jlog4j
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI specific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
}
// Javadoc generation is switched off for this module.
javadoc {
enabled = false
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI specific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
}
// Collect this module's third-party runtime jars (plus logging jars) under
// $buildDir/dependant-libs for packaging.
tasks . create ( name: "copyDependantLibs" , type: Copy ) {
// Logging jars (slf4j-log4j12 bridge and log4j) are only available via the
// test runtime configuration.
from ( configurations . testRuntime ) {
include ( 'slf4j-log4j12*' )
include ( 'log4j*jar' )
}
// Everything else from the runtime classpath, excluding jars produced by
// this build itself (kafka-clients and connect-* artifacts).
from ( configurations . runtime ) {
exclude ( 'kafka-clients*' )
exclude ( 'connect-*' )
}
into "$buildDir/dependant-libs"
// Avoid copying the same file twice when both from() clauses match it.
duplicatesStrategy 'exclude'
}
// The jar cannot be assembled until its dependant libs are staged.
jar.dependsOn(copyDependantLibs)
// Generates the HTML table of Connect worker (distributed) configs by running
// DistributedConfig's main(), which prints the rendered config docs to stdout.
task genConnectConfigDocs(type: JavaExec) {
  classpath = sourceSets.main.runtimeClasspath
  main = 'org.apache.kafka.connect.runtime.distributed.DistributedConfig'
  // Defer directory creation and stream opening to execution time. Doing this
  // at configuration time would truncate connect_config.html and leak an open
  // stream on every Gradle invocation, even when this task is not requested.
  doFirst {
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "connect_config.html").newOutputStream()
  }
}
// Generates the HTML table of sink connector configs by running
// SinkConnectorConfig's main(), which prints the rendered config docs to stdout.
task genSinkConnectorConfigDocs(type: JavaExec) {
  classpath = sourceSets.main.runtimeClasspath
  main = 'org.apache.kafka.connect.runtime.SinkConnectorConfig'
  // Defer directory creation and stream opening to execution time. Doing this
  // at configuration time would truncate the output file and leak an open
  // stream on every Gradle invocation, even when this task is not requested.
  doFirst {
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "sink_connector_config.html").newOutputStream()
  }
}
// Generates the HTML table of source connector configs by running
// SourceConnectorConfig's main(), which prints the rendered config docs to stdout.
task genSourceConnectorConfigDocs(type: JavaExec) {
  classpath = sourceSets.main.runtimeClasspath
  main = 'org.apache.kafka.connect.runtime.SourceConnectorConfig'
  // Defer directory creation and stream opening to execution time. Doing this
  // at configuration time would truncate the output file and leak an open
  // stream on every Gradle invocation, even when this task is not requested.
  doFirst {
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "source_connector_config.html").newOutputStream()
  }
}
// Generates the HTML docs for Connect's built-in transformations by running
// the TransformationDoc tool, which prints the rendered docs to stdout.
task genConnectTransformationDocs(type: JavaExec) {
  classpath = sourceSets.main.runtimeClasspath
  main = 'org.apache.kafka.connect.tools.TransformationDoc'
  // Defer directory creation and stream opening to execution time. Doing this
  // at configuration time would truncate the output file and leak an open
  // stream on every Gradle invocation, even when this task is not requested.
  doFirst {
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "connect_transforms.html").newOutputStream()
  }
}
// Generates the HTML docs for Connect's built-in predicates by running the
// PredicateDoc tool, which prints the rendered docs to stdout.
task genConnectPredicateDocs(type: JavaExec) {
  classpath = sourceSets.main.runtimeClasspath
  main = 'org.apache.kafka.connect.tools.PredicateDoc'
  // Defer directory creation and stream opening to execution time. Doing this
  // at configuration time would truncate the output file and leak an open
  // stream on every Gradle invocation, even when this task is not requested.
  doFirst {
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "connect_predicates.html").newOutputStream()
  }
}
// Generates the HTML docs for Connect metrics by running ConnectMetrics'
// main(). Note this one needs the *test* runtime classpath, unlike the other
// doc tasks which use the main classpath.
task genConnectMetricsDocs(type: JavaExec) {
  classpath = sourceSets.test.runtimeClasspath
  main = 'org.apache.kafka.connect.runtime.ConnectMetrics'
  // Defer directory creation and stream opening to execution time. Doing this
  // at configuration time would truncate the output file and leak an open
  // stream on every Gradle invocation, even when this task is not requested.
  doFirst {
    if (!generatedDocsDir.exists()) { generatedDocsDir.mkdirs() }
    standardOutput = new File(generatedDocsDir, "connect_metrics.html").newOutputStream()
  }
}
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI specific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
}
// Connect file connector module; publishes artifact "connect-file".
// NOTE(review): this project block is interrupted below by interleaved
// commit-log text (extraction artifact); its body continues after it.
project ( ':connect:file' ) {
archivesBaseName = "connect-file"
KAFKA-2366; Initial patch for Copycat
This is an initial patch implementing the basics of Copycat for KIP-26.
The intent here is to start a review of the key pieces of the core API and get a reasonably functional, baseline, non-distributed implementation of Copycat in place to get things rolling. The current patch has a number of known issues that need to be addressed before a final version:
* Some build-related issues. Specifically, requires some locally-installed dependencies (see below), ignores checkstyle for the runtime data library because it's lifted from Avro currently and likely won't last in its current form, and some Gradle task dependencies aren't quite right because I haven't gotten rid of the dependency on `core` (which should now be an easy patch since new consumer groups are in a much better state).
* This patch currently depends on some Confluent trunk code because I prototyped with our Avro serializers w/ schema-registry support. We need to figure out what we want to provide as an example built-in set of serializers. Unlike core Kafka where we could ignore the issue, providing only ByteArray or String serializers, this is pretty central to how Copycat works.
* This patch uses a hacked up version of Avro as its runtime data format. Not sure if we want to go through the entire API discussion just to get some basic code committed, so I filed KAFKA-2367 to handle that separately. The core connector APIs and the runtime data APIs are entirely orthogonal.
* This patch needs some updates to get aligned with recent new consumer changes (specifically, I'm aware of the ConcurrentModificationException issue on exit). More generally, the new consumer is in flux but Copycat depends on it, so there are likely to be some negative interactions.
* The layout feels a bit awkward to me right now because I ported it from a Maven layout. We don't have nearly the same level of granularity in Kafka currently (core and clients, plus the mostly ignored examples, log4j-appender, and a couple of contribs). We might want to reorganize, although keeping data+api separate from runtime and connector plugins is useful for minimizing dependencies.
* There are a variety of other things (e.g., I'm not happy with the exception hierarchy/how they are currently handled, TopicPartition doesn't really need to be duplicated unless we want Copycat entirely isolated from the Kafka APIs, etc), but I expect those we'll cover in the review.
Before commenting on the patch, it's probably worth reviewing https://issues.apache.org/jira/browse/KAFKA-2365 and https://issues.apache.org/jira/browse/KAFKA-2366 to get an idea of what I had in mind for a) what we ultimately want with all the Copycat patches and b) what we aim to cover in this initial patch. My hope is that we can use a WIP patch (after the current obvious deficiencies are addressed) while recognizing that we want to make iterative progress with a bunch of subsequent PRs.
Author: Ewen Cheslack-Postava <me@ewencp.org>
Reviewers: Ismael Juma, Gwen Shapira
Closes #99 from ewencp/copycat and squashes the following commits:
a3a47a6 [Ewen Cheslack-Postava] Simplify Copycat exceptions, make them a subclass of KafkaException.
8c108b0 [Ewen Cheslack-Postava] Rename Coordinator to Herder to avoid confusion with the consumer coordinator.
7bf8075 [Ewen Cheslack-Postava] Make Copycat CLI specific to standalone mode, clean up some config and get rid of config storage in standalone mode.
656a003 [Ewen Cheslack-Postava] Clarify and expand the explanation of the Copycat Coordinator interface.
c0e5fdc [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
0fa7a36 [Ewen Cheslack-Postava] Mark Copycat classes as unstable and reduce visibility of some classes where possible.
d55d31e [Ewen Cheslack-Postava] Reorganize Copycat code to put it all under one top-level directory.
b29cb2c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
d713a21 [Ewen Cheslack-Postava] Address Gwen's review comments.
6787a85 [Ewen Cheslack-Postava] Make Converter generic to match serializers since some serialization formats do not require a base class of Object; update many other classes to have generic key and value class type parameters to match this change.
b194c73 [Ewen Cheslack-Postava] Split Copycat converter option into two options for key and value.
0b5a1a0 [Ewen Cheslack-Postava] Normalize naming to use partition for both source and Kafka, adjusting naming in CopycatRecord classes to clearly differentiate.
e345142 [Ewen Cheslack-Postava] Remove Copycat reflection utils, use existing Utils and ConfigDef functionality from clients package.
be5c387 [Ewen Cheslack-Postava] Minor cleanup
122423e [Ewen Cheslack-Postava] Style cleanup
6ba87de [Ewen Cheslack-Postava] Remove most of the Avro-based mock runtime data API, only preserving enough schema functionality to support basic primitive types for an initial patch.
4674d13 [Ewen Cheslack-Postava] Address review comments, clean up some code styling.
25b5739 [Ewen Cheslack-Postava] Fix sink task offset commit concurrency issue by moving it to the worker thread and waking up the consumer to ensure it exits promptly.
0aefe21 [Ewen Cheslack-Postava] Add log4j settings for Copycat.
220e42d [Ewen Cheslack-Postava] Replace Avro serializer with JSON serializer.
1243a7c [Ewen Cheslack-Postava] Merge remote-tracking branch 'origin/trunk' into copycat
5a618c6 [Ewen Cheslack-Postava] Remove offset serializers, instead reusing the existing serializers and removing schema projection support.
e849e10 [Ewen Cheslack-Postava] Remove duplicated TopicPartition implementation.
dec1379 [Ewen Cheslack-Postava] Switch to using new consumer coordinator instead of manually assigning partitions. Remove dependency of copycat-runtime on core.
4a9b4f3 [Ewen Cheslack-Postava] Add some helpful Copycat-specific build and test targets that cover all Copycat packages.
31cd1ca [Ewen Cheslack-Postava] Add CLI tools for Copycat.
e14942c [Ewen Cheslack-Postava] Add Copycat file connector.
0233456 [Ewen Cheslack-Postava] Add copycat-avro and copycat-runtime
11981d2 [Ewen Cheslack-Postava] Add copycat-data and copycat-api
9 years ago
dependencies {
// Public Connect API plus the slf4j logging facade for main code.
compile project ( ':connect:api' )
compile libs . slf4jApi
// Test-only: mocking, JUnit 5, and an slf4j binding at test runtime.
testCompile libs . easymock
testCompile libs . junitJupiter
testRuntime libs . slf4jlog4j
// Reuse test utilities compiled in the clients module.
testCompile project ( ':clients' ) . sourceSets . test . output
}
// No published API in this module; skip Javadoc generation.
javadoc.enabled = false
// Stage runtime dependency jars into build/dependant-libs for packaging:
// the slf4j-log4j bridge (from the test runtime configuration) plus all
// runtime deps except kafka-clients and other connect artifacts.
tasks.create("copyDependantLibs", Copy) {
  from(configurations.testRuntime) {
    include('slf4j-log4j12*')
    include('log4j*jar')
  }
  from(configurations.runtime) {
    exclude('kafka-clients*')
    exclude('connect-*')
  }
  into "$buildDir/dependant-libs"
  duplicatesStrategy 'exclude'
}
// The jar cannot be assembled until its dependant libs are staged.
jar.dependsOn(copyDependantLibs)
}
// Connect basic-auth REST extension module; publishes artifact
// "connect-basic-auth-extension".
project ( ':connect:basic-auth-extension' ) {
archivesBaseName = "connect-basic-auth-extension"
dependencies {
// Public Connect API plus the slf4j logging facade for main code.
compile project ( ':connect:api' )
compile libs . slf4jApi
// Test-only: BouncyCastle PKIX, mocking, JUnit 5, and the clients
// module's test utilities.
testCompile libs . bcpkix
testCompile libs . easymock
testCompile libs . junitJupiter
testCompile project ( ':clients' ) . sourceSets . test . output
// Test runtime only: slf4j binding and Jersey servlet container.
testRuntime libs . slf4jlog4j
testRuntime libs . jerseyContainerServlet
}
// No published API in this module; skip Javadoc generation.
javadoc.enabled = false
KAFKA-7500: MirrorMaker 2.0 (KIP-382)
Implementation of [KIP-382 "MirrorMaker 2.0"](https://cwiki.apache.org/confluence/display/KAFKA/KIP-382%3A+MirrorMaker+2.0)
Author: Ryanne Dolan <ryannedolan@gmail.com>
Author: Arun Mathew <arunmathew88@gmail.com>
Author: In Park <inpark@cloudera.com>
Author: Andre Price <obsoleted@users.noreply.github.com>
Author: christian.hagel@rio.cloud <christian.hagel@rio.cloud>
Reviewers: Eno Thereska <eno.thereska@gmail.com>, William Hammond <william.t.hammond@gmail.com>, Viktor Somogyi <viktorsomogyi@gmail.com>, Jakub Korzeniowski, Tim Carey-Smith, Kamal Chandraprakash <kamal.chandraprakash@gmail.com>, Arun Mathew, Jeremy-l-ford, vpernin, Oleg Kasian <oleg.kasian@gmail.com>, Mickael Maison <mickael.maison@gmail.com>, Qihong Chen, Sriharsha Chintalapani <sriharsha@apache.org>, Jun Rao <junrao@gmail.com>, Randall Hauch <rhauch@gmail.com>, Manikumar Reddy <manikumar.reddy@gmail.com>, Ismael Juma <ismael@juma.me.uk>
Closes #6295 from ryannedolan/KIP-382
5 years ago
// Stage runtime dependency jars into build/dependant-libs for packaging:
// the slf4j-log4j bridge (from the test runtime configuration) plus all
// runtime deps except kafka-clients and other connect artifacts.
tasks.create("copyDependantLibs", Copy) {
  from(configurations.testRuntime) {
    include('slf4j-log4j12*')
    include('log4j*jar')
  }
  from(configurations.runtime) {
    exclude('kafka-clients*')
    exclude('connect-*')
  }
  into "$buildDir/dependant-libs"
  duplicatesStrategy 'exclude'
}
// The jar cannot be assembled until its dependant libs are staged.
jar.dependsOn(copyDependantLibs)
}
// MirrorMaker 2.0 connector module (KIP-382); publishes artifact
// "connect-mirror".
project ( ':connect:mirror' ) {
archivesBaseName = "connect-mirror"
// NOTE(review): this dependencies block is interrupted below by interleaved
// commit-log text (extraction artifact); the remaining test dependencies and
// the closing brace follow that text.
dependencies {
// Depends on the Connect API/runtime, the mirror client, core clients,
// CLI arg parsing, and the slf4j logging facade.
compile project ( ':connect:api' )
compile project ( ':connect:runtime' )
compile project ( ':connect:mirror-client' )
compile project ( ':clients' )
compile libs . argparse4j
compile libs . slf4jApi
// Test-only: JUnit 5 and Mockito.
testCompile libs . junitJupiter
testCompile libs . mockitoCore
KAFKA-7500: MirrorMaker 2.0 (KIP-382)
Implementation of [KIP-382 "MirrorMaker 2.0"](https://cwiki.apache.org/confluence/display/KAFKA/KIP-382%3A+MirrorMaker+2.0)
Author: Ryanne Dolan <ryannedolan@gmail.com>
Author: Arun Mathew <arunmathew88@gmail.com>
Author: In Park <inpark@cloudera.com>
Author: Andre Price <obsoleted@users.noreply.github.com>
Author: christian.hagel@rio.cloud <christian.hagel@rio.cloud>
Reviewers: Eno Thereska <eno.thereska@gmail.com>, William Hammond <william.t.hammond@gmail.com>, Viktor Somogyi <viktorsomogyi@gmail.com>, Jakub Korzeniowski, Tim Carey-Smith, Kamal Chandraprakash <kamal.chandraprakash@gmail.com>, Arun Mathew, Jeremy-l-ford, vpernin, Oleg Kasian <oleg.kasian@gmail.com>, Mickael Maison <mickael.maison@gmail.com>, Qihong Chen, Sriharsha Chintalapani <sriharsha@apache.org>, Jun Rao <junrao@gmail.com>, Randall Hauch <rhauch@gmail.com>, Manikumar Reddy <manikumar.reddy@gmail.com>, Ismael Juma <ismael@juma.me.uk>
Closes #6295 from ryannedolan/KIP-382
5 years ago
// Continuation of the mirror module's test dependencies: test utilities
// from clients, connect:runtime, and core, plus runtime-only test deps.
testCompile project ( ':clients' ) . sourceSets . test . output
testCompile project ( ':connect:runtime' ) . sourceSets . test . output
testCompile project ( ':core' )
testCompile project ( ':core' ) . sourceSets . test . output
testRuntime project ( ':connect:runtime' )
testRuntime libs . slf4jlog4j
testRuntime libs . bcpkix
KAFKA-7500: MirrorMaker 2.0 (KIP-382)
Implementation of [KIP-382 "MirrorMaker 2.0"](https://cwiki.apache.org/confluence/display/KAFKA/KIP-382%3A+MirrorMaker+2.0)
Author: Ryanne Dolan <ryannedolan@gmail.com>
Author: Arun Mathew <arunmathew88@gmail.com>
Author: In Park <inpark@cloudera.com>
Author: Andre Price <obsoleted@users.noreply.github.com>
Author: christian.hagel@rio.cloud <christian.hagel@rio.cloud>
Reviewers: Eno Thereska <eno.thereska@gmail.com>, William Hammond <william.t.hammond@gmail.com>, Viktor Somogyi <viktorsomogyi@gmail.com>, Jakub Korzeniowski, Tim Carey-Smith, Kamal Chandraprakash <kamal.chandraprakash@gmail.com>, Arun Mathew, Jeremy-l-ford, vpernin, Oleg Kasian <oleg.kasian@gmail.com>, Mickael Maison <mickael.maison@gmail.com>, Qihong Chen, Sriharsha Chintalapani <sriharsha@apache.org>, Jun Rao <junrao@gmail.com>, Randall Hauch <rhauch@gmail.com>, Manikumar Reddy <manikumar.reddy@gmail.com>, Ismael Juma <ismael@juma.me.uk>
Closes #6295 from ryannedolan/KIP-382
5 years ago
}
// No published API in this module; skip Javadoc generation.
javadoc.enabled = false
// Stage runtime dependency jars into build/dependant-libs for packaging:
// the slf4j-log4j bridge (from the test runtime configuration) plus all
// runtime deps except kafka-clients and other connect artifacts.
tasks.create("copyDependantLibs", Copy) {
  from(configurations.testRuntime) {
    include('slf4j-log4j12*')
    include('log4j*jar')
  }
  from(configurations.runtime) {
    exclude('kafka-clients*')
    exclude('connect-*')
  }
  into "$buildDir/dependant-libs"
  duplicatesStrategy 'exclude'
}
// The jar cannot be assembled until its dependant libs are staged.
jar.dependsOn(copyDependantLibs)
}
// MirrorMaker 2.0 client library (KIP-382); publishes artifact
// "connect-mirror-client".
// NOTE(review): this dependencies block is interrupted below by interleaved
// commit-log text (extraction artifact); its tail follows that text.
project ( ':connect:mirror-client' ) {
archivesBaseName = "connect-mirror-client"
dependencies {
// Core clients plus the slf4j logging facade; JUnit 5 for tests.
compile project ( ':clients' )
compile libs . slf4jApi
testCompile libs . junitJupiter
KAFKA-7500: MirrorMaker 2.0 (KIP-382)
Implementation of [KIP-382 "MirrorMaker 2.0"](https://cwiki.apache.org/confluence/display/KAFKA/KIP-382%3A+MirrorMaker+2.0)
Author: Ryanne Dolan <ryannedolan@gmail.com>
Author: Arun Mathew <arunmathew88@gmail.com>
Author: In Park <inpark@cloudera.com>
Author: Andre Price <obsoleted@users.noreply.github.com>
Author: christian.hagel@rio.cloud <christian.hagel@rio.cloud>
Reviewers: Eno Thereska <eno.thereska@gmail.com>, William Hammond <william.t.hammond@gmail.com>, Viktor Somogyi <viktorsomogyi@gmail.com>, Jakub Korzeniowski, Tim Carey-Smith, Kamal Chandraprakash <kamal.chandraprakash@gmail.com>, Arun Mathew, Jeremy-l-ford, vpernin, Oleg Kasian <oleg.kasian@gmail.com>, Mickael Maison <mickael.maison@gmail.com>, Qihong Chen, Sriharsha Chintalapani <sriharsha@apache.org>, Jun Rao <junrao@gmail.com>, Randall Hauch <rhauch@gmail.com>, Manikumar Reddy <manikumar.reddy@gmail.com>, Ismael Juma <ismael@juma.me.uk>
Closes #6295 from ryannedolan/KIP-382
5 years ago
// Continuation: clients' test utilities and an slf4j binding at test runtime.
testCompile project ( ':clients' ) . sourceSets . test . output
testRuntime libs . slf4jlog4j
}
// This is a published client library, so Javadoc generation stays enabled
// (it is therefore picked up by aggregatedJavadoc).
javadoc.enabled = true
// Stage runtime dependency jars into build/dependant-libs for packaging:
// the slf4j-log4j bridge (from the test runtime configuration) plus all
// runtime deps except kafka-clients and other connect artifacts.
tasks.create("copyDependantLibs", Copy) {
  from(configurations.testRuntime) {
    include('slf4j-log4j12*')
    include('log4j*jar')
  }
  from(configurations.runtime) {
    exclude('kafka-clients*')
    exclude('connect-*')
  }
  into "$buildDir/dependant-libs"
  duplicatesStrategy 'exclude'
}
// The jar cannot be assembled until its dependant libs are staged.
jar.dependsOn(copyDependantLibs)
}
// Builds one combined Javadoc set from every subproject whose javadoc task is
// enabled, linking JDK types to the appropriate Oracle API docs.
task aggregatedJavadoc(type: Javadoc) {
  def docProjects = subprojects.findAll { it.javadoc.enabled }
  source = docProjects.collect { it.sourceSets.main.allJava }
  classpath = files(docProjects.collect { it.sourceSets.main.compileClasspath })
  includes = docProjects.collectMany { it.javadoc.getIncludes() }
  excludes = docProjects.collectMany { it.javadoc.getExcludes() }
  // The JDK API doc URL layout changed after Java 8 (the locale segment was added).
  def jdkApiLink = JavaVersion.current().isJava11Compatible() ?
      "https://docs.oracle.com/en/java/javase/${JavaVersion.current().majorVersion}/docs/api/" :
      "https://docs.oracle.com/javase/8/docs/api/"
  options.links jdkApiLink
}