MINOR: Move streams-examples source files under src folder

Also remove some unused imports.

Author: Guozhang Wang <wangguoz@gmail.com>

Reviewers: Ismael Juma <ismael@juma.me.uk>, Ewen Cheslack-Postava <ewen@confluent.io>

Closes #992 from guozhangwang/KSExamples
Guozhang Wang authored 2016-03-01 18:53:58 -08:00, committed by Ewen Cheslack-Postava
parent 79662cc7cb
commit edeb11bc56
11 changed files with 21 additions and 15 deletions

@@ -52,6 +52,11 @@ do
   CLASSPATH=$CLASSPATH:$file
 done

+for file in $base_dir/streams/examples/build/libs/kafka-streams-examples*.jar;
+do
+  CLASSPATH=$CLASSPATH:$file
+done
+
 for file in $base_dir/streams/build/dependant-libs-${SCALA_VERSION}/rocksdb*.jar;
 do
   CLASSPATH=$CLASSPATH:$file

@@ -259,7 +259,7 @@ for ( sv in ['2_10', '2_11'] ) {
 }

 def connectPkgs = ['connect:api', 'connect:runtime', 'connect:json', 'connect:file']
-def pkgs = ['clients', 'examples', 'log4j-appender', 'tools', 'streams'] + connectPkgs
+def pkgs = ['clients', 'examples', 'log4j-appender', 'tools', 'streams', 'streams:examples'] + connectPkgs

 tasks.create(name: "jarConnect", dependsOn: connectPkgs.collect { it + ":jar" }) {}
 tasks.create(name: "jarAll", dependsOn: ['jar_core_2_10', 'jar_core_2_11'] + pkgs.collect { it + ":jar" }) { }
@@ -374,6 +374,7 @@ project(':core') {
     from(project(':connect:file').jar) { into("libs/") }
     from(project(':connect:file').configurations.runtime) { into("libs/") }
     from(project(':streams').jar) { into("libs/") }
+    from(project(':streams:examples').jar) { into("libs/") }
   }

   jar {

@@ -127,6 +127,11 @@
     <allow pkg="org.apache.kafka.streams"/>

+    <subpackage name="examples">
+      <allow pkg="com.fasterxml.jackson.databind" />
+      <allow pkg="org.apache.kafka.connect.json" />
+    </subpackage>
+
     <subpackage name="state">
      <allow pkg="org.rocksdb" />
    </subpackage>
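These checkstyle rules open the examples subpackage to Jackson's databind tree model and to Connect's JSON (de)serializers. For reference, a minimal sketch of the kind of code they permit; the class and topic name below are illustrative only, while the imported Kafka and Jackson classes are real:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.kafka.connect.json.JsonDeserializer;
import org.apache.kafka.connect.json.JsonSerializer;

public class JsonRoundTripSketch {
    public static void main(String[] args) {
        // Build a page-view record with Jackson, as the untyped example does.
        ObjectNode record = JsonNodeFactory.instance.objectNode()
                .put("user", "alice")
                .put("page", "index.html");

        // Round-trip it through Connect's JSON serializer/deserializer.
        JsonSerializer serializer = new JsonSerializer();
        JsonDeserializer deserializer = new JsonDeserializer();

        byte[] bytes = serializer.serialize("pageviews", record); // topic name is arbitrary here
        JsonNode back = deserializer.deserialize("pageviews", bytes);

        System.out.println(back.get("user").textValue()); // prints "alice"
    }
}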

@@ -17,7 +17,6 @@
 package org.apache.kafka.streams.examples.pageview;

-import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.serialization.Deserializer;
 import org.apache.kafka.common.serialization.LongDeserializer;
 import org.apache.kafka.common.serialization.LongSerializer;

@@ -20,7 +20,6 @@ package org.apache.kafka.streams.examples.pageview;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.serialization.Deserializer;
 import org.apache.kafka.common.serialization.LongDeserializer;
 import org.apache.kafka.common.serialization.LongSerializer;
@@ -93,15 +92,15 @@ public class PageViewUntypedJob {
         KStream<JsonNode, JsonNode> regionCount = views
             .leftJoin(userRegions, new ValueJoiner<JsonNode, String, JsonNode>() {
                 @Override
                 public JsonNode apply(JsonNode view, String region) {
                     ObjectNode jNode = JsonNodeFactory.instance.objectNode();

                     return jNode.put("user", view.get("user").textValue())
                             .put("page", view.get("page").textValue())
                             .put("region", region == null ? "UNKNOWN" : region);
                 }
             })
             .map(new KeyValueMapper<String, JsonNode, KeyValue<String, JsonNode>>() {
                 @Override
                 public KeyValue<String, JsonNode> apply(String user, JsonNode viewRegion) {
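The second hunk ends mid-mapper. A plausible completion (an assumption for illustration, not text from this commit) re-keys each joined record by its region so a downstream step can count page views per region:

import com.fasterxml.jackson.databind.JsonNode;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KeyValueMapper;

// Hypothetical body for the mapper the hunk cuts off: key each joined
// record by its "region" field, keeping the full JSON record as the value.
public class RegionKeyMapper implements KeyValueMapper<String, JsonNode, KeyValue<String, JsonNode>> {
    @Override
    public KeyValue<String, JsonNode> apply(String user, JsonNode viewRegion) {
        return new KeyValue<>(viewRegion.get("region").textValue(), viewRegion);
    }
}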

@@ -17,7 +17,6 @@
 package org.apache.kafka.streams.examples.pipe;

-import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.apache.kafka.streams.kstream.KStreamBuilder;

@@ -17,7 +17,6 @@
 package org.apache.kafka.streams.examples.wordcount;

-import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.serialization.Deserializer;
 import org.apache.kafka.common.serialization.LongDeserializer;
 import org.apache.kafka.common.serialization.LongSerializer;

@@ -17,7 +17,6 @@
 package org.apache.kafka.streams.examples.wordcount;

-import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.apache.kafka.streams.KeyValue;
@@ -64,7 +63,7 @@ public class WordCountProcessorJob {
             @Override
             public void process(String dummy, String line) {
-                String words[] = line.toLowerCase().split(" ");
+                String[] words = line.toLowerCase().split(" ");

                 for (String word : words) {
                     Integer oldValue = this.kvStore.get(word);
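The hunk stops just as the counting loop begins. A self-contained sketch of that logic, with a plain HashMap standing in for the processor's KeyValueStore (a substitution made only to keep the sketch runnable):

import java.util.HashMap;
import java.util.Map;

public class WordCountSketch {
    public static void main(String[] args) {
        // Stand-in for the state store used by WordCountProcessorJob.
        Map<String, Integer> kvStore = new HashMap<>();
        String line = "the quick brown fox jumps over the lazy dog";

        String[] words = line.toLowerCase().split(" ");
        for (String word : words) {
            Integer oldValue = kvStore.get(word);
            // null means the word has not been seen before
            kvStore.put(word, oldValue == null ? 1 : oldValue + 1);
        }

        System.out.println(kvStore.get("the")); // prints 2
    }
}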