From 8d0ab1de70f3361d26e1e2cbfc4363f5ca45d4c2 Mon Sep 17 00:00:00 2001
From: Jun Rao
Date: Sun, 14 Sep 2014 10:16:07 -0700
Subject: [PATCH] kafka-1619; perf dir can be removed; patched by Jun Rao; reviewed by Guozhang Wang, Neha Narkhede and Stevo Slavic

---
 README.md       |  4 ++--
 build.gradle    | 23 ++++-------------------
 settings.gradle |  2 +-
 3 files changed, 7 insertions(+), 22 deletions(-)

diff --git a/README.md b/README.md
index 8cd5cfd1e04..544d475caf3 100644
--- a/README.md
+++ b/README.md
@@ -48,7 +48,7 @@ The release file can be found inside ./core/build/distributions/.
     ./gradlew -PscalaVersion=2.9.1 releaseTarGz
 
 ### Running a task for a specific project ###
-This is for 'core', 'perf', 'contrib:hadoop-consumer', 'contrib:hadoop-producer', 'examples' and 'clients'
+This is for 'core', 'contrib:hadoop-consumer', 'contrib:hadoop-producer', 'examples' and 'clients'
 
     ./gradlew core:jar
     ./gradlew core:test
@@ -88,7 +88,7 @@ Please note for this to work you should create/update `~/.gradle/gradle.properti
 
 ### Contribution ###
 
-Apache Kafka interested in building the community; we would welcome any thoughts or [patches](https://issues.apache.org/jira/browse/KAFKA). You can reach us [on the Apache mailing lists](http://kafka.apache.org/contact.html).
+Apache Kafka is interested in building the community; we would welcome any thoughts or [patches](https://issues.apache.org/jira/browse/KAFKA). You can reach us [on the Apache mailing lists](http://kafka.apache.org/contact.html).
 
 To contribute follow the instructions here:
  * http://kafka.apache.org/contributing.html
diff --git a/build.gradle b/build.gradle
index 6d6f1a4349d..d0bfe2bcd8f 100644
--- a/build.gradle
+++ b/build.gradle
@@ -164,12 +164,12 @@ for ( sv in ['2_9_1', '2_9_2', '2_10_1', '2_11'] ) {
   }
 }
 
-tasks.create(name: "jarAll", dependsOn: ['jar_core_2_9_1', 'jar_core_2_9_2', 'jar_core_2_10_1', 'jar_core_2_11', 'clients:jar', 'perf:jar', 'examples:jar', 'contrib:hadoop-consumer:jar', 'contrib:hadoop-producer:jar']) {
+tasks.create(name: "jarAll", dependsOn: ['jar_core_2_9_1', 'jar_core_2_9_2', 'jar_core_2_10_1', 'jar_core_2_11', 'clients:jar', 'examples:jar', 'contrib:hadoop-consumer:jar', 'contrib:hadoop-producer:jar']) {
 }
 
-tasks.create(name: "srcJarAll", dependsOn: ['srcJar_2_9_1', 'srcJar_2_9_2', 'srcJar_2_10_1', 'srcJar_2_11', 'clients:srcJar', 'perf:srcJar', 'examples:srcJar', 'contrib:hadoop-consumer:srcJar', 'contrib:hadoop-producer:srcJar']) { }
+tasks.create(name: "srcJarAll", dependsOn: ['srcJar_2_9_1', 'srcJar_2_9_2', 'srcJar_2_10_1', 'srcJar_2_11', 'clients:srcJar', 'examples:srcJar', 'contrib:hadoop-consumer:srcJar', 'contrib:hadoop-producer:srcJar']) { }
 
-tasks.create(name: "docsJarAll", dependsOn: ['docsJar_2_9_1', 'docsJar_2_9_2', 'docsJar_2_10_1', 'docsJar_2_11', 'clients:docsJar', 'perf:docsJar', 'examples:docsJar', 'contrib:hadoop-consumer:docsJar', 'contrib:hadoop-producer:docsJar']) { }
+tasks.create(name: "docsJarAll", dependsOn: ['docsJar_2_9_1', 'docsJar_2_9_2', 'docsJar_2_10_1', 'docsJar_2_11', 'clients:docsJar', 'examples:docsJar', 'contrib:hadoop-consumer:docsJar', 'contrib:hadoop-producer:docsJar']) { }
 
 tasks.create(name: "testAll", dependsOn: ['test_core_2_9_1', 'test_core_2_9_2', 'test_core_2_10_1', 'test_core_2_11', 'clients:test']) { }
 
@@ -177,7 +177,7 @@ tasks.create(name: "testAll", dependsOn: ['test_core_2_9_1', 'test_core_2_9_2',
 tasks.create(name: "releaseTarGzAll", dependsOn: ['releaseTarGz_2_9_1', 'releaseTarGz_2_9_2', 'releaseTarGz_2_10_1', 'releaseTarGz_2_11']) {
 }
 
-tasks.create(name: "uploadArchivesAll", dependsOn: ['uploadCoreArchives_2_9_1', 'uploadCoreArchives_2_9_2', 'uploadCoreArchives_2_10_1', 'uploadCoreArchives_2_11', 'clients:uploadArchives', 'perf:uploadArchives', 'examples:uploadArchives', 'contrib:hadoop-consumer:uploadArchives', 'contrib:hadoop-producer:uploadArchives']) {
+tasks.create(name: "uploadArchivesAll", dependsOn: ['uploadCoreArchives_2_9_1', 'uploadCoreArchives_2_9_2', 'uploadCoreArchives_2_10_1', 'uploadCoreArchives_2_11', 'clients:uploadArchives', 'examples:uploadArchives', 'contrib:hadoop-consumer:uploadArchives', 'contrib:hadoop-producer:uploadArchives']) {
 }
 
 project(':core') {
@@ -266,21 +266,6 @@ project(':core') {
   }
 }
 
-project(':perf') {
-  println "Building project 'perf' with Scala version $scalaVersion"
-
-  apply plugin: 'scala'
-  archivesBaseName = "kafka-perf_${baseScalaVersion}"
-
-  dependencies {
-    compile project(':core')
-    compile "org.scala-lang:scala-library:$scalaVersion"
-    compile 'net.sf.jopt-simple:jopt-simple:3.2'
-
-    zinc 'com.typesafe.zinc:zinc:0.3.1'
-  }
-}
-
 project(':contrib:hadoop-consumer') {
   archivesBaseName = "kafka-hadoop-consumer"
 
diff --git a/settings.gradle b/settings.gradle
index 6041784d6f8..83f764e6a4a 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -14,4 +14,4 @@
 // limitations under the License.
 
 apply from: file('scala.gradle')
-include 'core', 'perf', 'contrib:hadoop-consumer', 'contrib:hadoop-producer', 'examples', 'clients'
+include 'core', 'contrib:hadoop-consumer', 'contrib:hadoop-producer', 'examples', 'clients'