kafka-1619; perf dir can be removed; patched by Jun Rao; reviewed by Guozhang Wang, Neha Narkhede and Stevo Slavic

Jun Rao 2014-09-14 10:16:07 -07:00
parent 3d3c544d6f
commit 8d0ab1de70
3 changed files with 7 additions and 22 deletions

README.md

@@ -48,7 +48,7 @@ The release file can be found inside ./core/build/distributions/.
     ./gradlew -PscalaVersion=2.9.1 releaseTarGz
 ### Running a task for a specific project ###
-This is for 'core', 'perf', 'contrib:hadoop-consumer', 'contrib:hadoop-producer', 'examples' and 'clients'
+This is for 'core', 'contrib:hadoop-consumer', 'contrib:hadoop-producer', 'examples' and 'clients'
     ./gradlew core:jar
     ./gradlew core:test
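The per-project pattern shown above applies unchanged to the remaining modules; for instance, the clients and contrib builds should be invokable as:

    ./gradlew clients:jar
    ./gradlew contrib:hadoop-consumer:jar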
@@ -88,7 +88,7 @@ Please note for this to work you should create/update `~/.gradle/gradle.properti
 ### Contribution ###
-Apache Kafka interested in building the community; we would welcome any thoughts or [patches](https://issues.apache.org/jira/browse/KAFKA). You can reach us [on the Apache mailing lists](http://kafka.apache.org/contact.html).
+Apache Kafka is interested in building the community; we would welcome any thoughts or [patches](https://issues.apache.org/jira/browse/KAFKA). You can reach us [on the Apache mailing lists](http://kafka.apache.org/contact.html).
 To contribute follow the instructions here:
  * http://kafka.apache.org/contributing.html

build.gradle

@@ -164,12 +164,12 @@ for ( sv in ['2_9_1', '2_9_2', '2_10_1', '2_11'] ) {
   }
 }
-tasks.create(name: "jarAll", dependsOn: ['jar_core_2_9_1', 'jar_core_2_9_2', 'jar_core_2_10_1', 'jar_core_2_11', 'clients:jar', 'perf:jar', 'examples:jar', 'contrib:hadoop-consumer:jar', 'contrib:hadoop-producer:jar']) {
+tasks.create(name: "jarAll", dependsOn: ['jar_core_2_9_1', 'jar_core_2_9_2', 'jar_core_2_10_1', 'jar_core_2_11', 'clients:jar', 'examples:jar', 'contrib:hadoop-consumer:jar', 'contrib:hadoop-producer:jar']) {
 }
-tasks.create(name: "srcJarAll", dependsOn: ['srcJar_2_9_1', 'srcJar_2_9_2', 'srcJar_2_10_1', 'srcJar_2_11', 'clients:srcJar', 'perf:srcJar', 'examples:srcJar', 'contrib:hadoop-consumer:srcJar', 'contrib:hadoop-producer:srcJar']) { }
+tasks.create(name: "srcJarAll", dependsOn: ['srcJar_2_9_1', 'srcJar_2_9_2', 'srcJar_2_10_1', 'srcJar_2_11', 'clients:srcJar', 'examples:srcJar', 'contrib:hadoop-consumer:srcJar', 'contrib:hadoop-producer:srcJar']) { }
-tasks.create(name: "docsJarAll", dependsOn: ['docsJar_2_9_1', 'docsJar_2_9_2', 'docsJar_2_10_1', 'docsJar_2_11', 'clients:docsJar', 'perf:docsJar', 'examples:docsJar', 'contrib:hadoop-consumer:docsJar', 'contrib:hadoop-producer:docsJar']) { }
+tasks.create(name: "docsJarAll", dependsOn: ['docsJar_2_9_1', 'docsJar_2_9_2', 'docsJar_2_10_1', 'docsJar_2_11', 'clients:docsJar', 'examples:docsJar', 'contrib:hadoop-consumer:docsJar', 'contrib:hadoop-producer:docsJar']) { }
 tasks.create(name: "testAll", dependsOn: ['test_core_2_9_1', 'test_core_2_9_2', 'test_core_2_10_1', 'test_core_2_11', 'clients:test']) {
 }
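The aggregate tasks keep their names; they simply stop depending on the perf jars. Invoking them should look the same as before, e.g.:

    ./gradlew jarAll
    ./gradlew srcJarAll
    ./gradlew docsJarAll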
@@ -177,7 +177,7 @@ tasks.create(name: "testAll", dependsOn: ['test_core_2_9_1', 'test_core_2_9_2',
 tasks.create(name: "releaseTarGzAll", dependsOn: ['releaseTarGz_2_9_1', 'releaseTarGz_2_9_2', 'releaseTarGz_2_10_1', 'releaseTarGz_2_11']) {
 }
-tasks.create(name: "uploadArchivesAll", dependsOn: ['uploadCoreArchives_2_9_1', 'uploadCoreArchives_2_9_2', 'uploadCoreArchives_2_10_1', 'uploadCoreArchives_2_11', 'clients:uploadArchives', 'perf:uploadArchives', 'examples:uploadArchives', 'contrib:hadoop-consumer:uploadArchives', 'contrib:hadoop-producer:uploadArchives']) {
+tasks.create(name: "uploadArchivesAll", dependsOn: ['uploadCoreArchives_2_9_1', 'uploadCoreArchives_2_9_2', 'uploadCoreArchives_2_10_1', 'uploadCoreArchives_2_11', 'clients:uploadArchives', 'examples:uploadArchives', 'contrib:hadoop-consumer:uploadArchives', 'contrib:hadoop-producer:uploadArchives']) {
 }
 project(':core') {
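Likewise, publishing all artifacts still goes through the single aggregate task; assuming the `~/.gradle/gradle.properties` setup referenced in the README hunk above is in place:

    ./gradlew uploadArchivesAll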
@@ -266,21 +266,6 @@ project(':core') {
   }
 }
-project(':perf') {
-  println "Building project 'perf' with Scala version $scalaVersion"
-  apply plugin: 'scala'
-  archivesBaseName = "kafka-perf_${baseScalaVersion}"
-  dependencies {
-    compile project(':core')
-    compile "org.scala-lang:scala-library:$scalaVersion"
-    compile 'net.sf.jopt-simple:jopt-simple:3.2'
-    zinc 'com.typesafe.zinc:zinc:0.3.1'
-  }
-}
 project(':contrib:hadoop-consumer') {
   archivesBaseName = "kafka-hadoop-consumer"

settings.gradle

@@ -14,4 +14,4 @@
 // limitations under the License.
 apply from: file('scala.gradle')
-include 'core', 'perf', 'contrib:hadoop-consumer', 'contrib:hadoop-producer', 'examples', 'clients'
+include 'core', 'contrib:hadoop-consumer', 'contrib:hadoop-producer', 'examples', 'clients'
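Once settings.gradle drops 'perf', Gradle's built-in project report can be used to verify the remaining subproject set; something like:

    ./gradlew projects

should list core, clients, examples and the two contrib modules, with no perf entry.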